1use std::marker::PhantomData;
6
7use std::collections::{HashMap, VecDeque};
8
9#[allow(dead_code)]
11pub struct ChainedArena {
12 blocks: Vec<ArenaBlock>,
13 block_size: usize,
14 total_alloc: usize,
15}
16#[allow(dead_code)]
17impl ChainedArena {
18 pub fn new(block_size: usize) -> Self {
20 let first = ArenaBlock::new(block_size);
21 Self {
22 blocks: vec![first],
23 block_size,
24 total_alloc: 0,
25 }
26 }
27 pub fn alloc(&mut self, bytes: usize, align: usize) -> usize {
29 let last = self.blocks.len() - 1;
30 if let Some(offset) = self.blocks[last].try_alloc(bytes, align) {
31 self.total_alloc += bytes;
32 return offset + last * self.block_size;
33 }
34 let new_block_size = self.block_size.max(bytes + align);
35 let mut block = ArenaBlock::new(new_block_size);
36 let offset = block
37 .try_alloc(bytes, align)
38 .expect("arena block allocation must succeed");
39 let block_idx = self.blocks.len();
40 self.blocks.push(block);
41 self.total_alloc += bytes;
42 offset + block_idx * self.block_size
43 }
44 pub fn num_blocks(&self) -> usize {
46 self.blocks.len()
47 }
48 pub fn total_allocated(&self) -> usize {
50 self.total_alloc
51 }
52}
53#[derive(Debug, Clone)]
58pub struct ArenaMap<T, V> {
59 data: Vec<Option<V>>,
60 _marker: PhantomData<T>,
61}
62impl<T, V> ArenaMap<T, V> {
63 pub fn new() -> Self {
65 Self {
66 data: Vec::new(),
67 _marker: PhantomData,
68 }
69 }
70 pub fn insert(&mut self, idx: Idx<T>, value: V) {
72 let i = idx.to_usize();
73 if i >= self.data.len() {
74 self.data.resize_with(i + 1, || None);
75 }
76 self.data[i] = Some(value);
77 }
78 pub fn get(&self, idx: Idx<T>) -> Option<&V> {
80 self.data.get(idx.to_usize())?.as_ref()
81 }
82 pub fn get_mut(&mut self, idx: Idx<T>) -> Option<&mut V> {
84 self.data.get_mut(idx.to_usize())?.as_mut()
85 }
86 pub fn remove(&mut self, idx: Idx<T>) -> Option<V> {
88 self.data.get_mut(idx.to_usize())?.take()
89 }
90 pub fn contains(&self, idx: Idx<T>) -> bool {
92 self.get(idx).is_some()
93 }
94 pub fn count(&self) -> usize {
96 self.data.iter().filter(|e| e.is_some()).count()
97 }
98 pub fn iter(&self) -> impl Iterator<Item = (Idx<T>, &V)> {
100 self.data
101 .iter()
102 .enumerate()
103 .filter_map(|(i, v)| v.as_ref().map(|val| (Idx::new(i as u32), val)))
104 }
105}
/// Stack of lexical scope names, e.g. for building dotted paths.
#[allow(dead_code)]
pub struct ScopeStack {
    names: Vec<String>,
}
#[allow(dead_code)]
impl ScopeStack {
    /// Creates an empty scope stack.
    pub fn new() -> Self {
        Self { names: Vec::new() }
    }
    /// Enters a scope named `name`.
    pub fn push(&mut self, name: impl Into<String>) {
        self.names.push(name.into());
    }
    /// Leaves the innermost scope, returning its name if one was active.
    pub fn pop(&mut self) -> Option<String> {
        self.names.pop()
    }
    /// Name of the innermost scope, if any.
    pub fn current(&self) -> Option<&str> {
        self.names.last().map(String::as_str)
    }
    /// Number of currently active scopes.
    pub fn depth(&self) -> usize {
        self.names.len()
    }
    /// True when no scope is active.
    pub fn is_empty(&self) -> bool {
        self.names.is_empty()
    }
    /// Dot-separated path from the outermost to the innermost scope.
    pub fn path(&self) -> String {
        self.names.join(".")
    }
}
/// Deduplicating string table: each distinct string gets a stable `u32` id.
#[allow(dead_code)]
pub struct StringInterner {
    strings: Vec<String>,
    map: std::collections::HashMap<String, u32>,
}
#[allow(dead_code)]
impl StringInterner {
    /// Creates an empty interner.
    pub fn new() -> Self {
        Self {
            strings: Vec::new(),
            map: std::collections::HashMap::new(),
        }
    }
    /// Returns the id for `s`, interning it on first sight.
    pub fn intern(&mut self, s: &str) -> u32 {
        match self.map.get(s) {
            Some(&existing) => existing,
            None => {
                let fresh = self.strings.len() as u32;
                self.strings.push(s.to_owned());
                self.map.insert(s.to_owned(), fresh);
                fresh
            }
        }
    }
    /// Resolves an id back to its string, if the id is known.
    pub fn get(&self, id: u32) -> Option<&str> {
        self.strings.get(id as usize).map(String::as_str)
    }
    /// Number of distinct strings interned so far.
    pub fn len(&self) -> usize {
        self.strings.len()
    }
    /// True if nothing has been interned yet.
    pub fn is_empty(&self) -> bool {
        self.strings.is_empty()
    }
}
180#[allow(dead_code)]
182pub struct DiagMeta {
183 pub(super) entries: Vec<(String, String)>,
184}
185#[allow(dead_code)]
186impl DiagMeta {
187 pub fn new() -> Self {
189 Self {
190 entries: Vec::new(),
191 }
192 }
193 pub fn add(&mut self, key: impl Into<String>, val: impl Into<String>) {
195 self.entries.push((key.into(), val.into()));
196 }
197 pub fn get(&self, key: &str) -> Option<&str> {
199 self.entries
200 .iter()
201 .find(|(k, _)| k == key)
202 .map(|(_, v)| v.as_str())
203 }
204 pub fn len(&self) -> usize {
206 self.entries.len()
207 }
208 pub fn is_empty(&self) -> bool {
210 self.entries.is_empty()
211 }
212}
213#[allow(dead_code)]
215#[allow(missing_docs)]
216pub struct StatCache<K: std::hash::Hash + Eq + Clone, V: Clone> {
217 pub inner: SimpleLruCache<K, V>,
219 pub hits: u64,
221 pub misses: u64,
223}
224#[allow(dead_code)]
225impl<K: std::hash::Hash + Eq + Clone, V: Clone> StatCache<K, V> {
226 pub fn new(capacity: usize) -> Self {
228 Self {
229 inner: SimpleLruCache::new(capacity),
230 hits: 0,
231 misses: 0,
232 }
233 }
234 pub fn get(&mut self, key: &K) -> Option<&V> {
236 let result = self.inner.get(key);
237 if result.is_some() {
238 self.hits += 1;
239 } else {
240 self.misses += 1;
241 }
242 None
243 }
244 pub fn put(&mut self, key: K, val: V) {
246 self.inner.put(key, val);
247 }
248 pub fn hit_rate(&self) -> f64 {
250 let total = self.hits + self.misses;
251 if total == 0 {
252 return 0.0;
253 }
254 self.hits as f64 / total as f64
255 }
256}
257#[allow(dead_code)]
259pub struct TwoGenerationArena {
260 pub(crate) nursery: GrowableArena,
261 pub(crate) stable: GrowableArena,
262 promotions: usize,
263}
264#[allow(dead_code)]
265impl TwoGenerationArena {
266 pub fn new(nursery_cap: usize, stable_cap: usize) -> Self {
268 Self {
269 nursery: GrowableArena::new(nursery_cap),
270 stable: GrowableArena::new(stable_cap),
271 promotions: 0,
272 }
273 }
274 pub fn alloc_nursery(&mut self, size: usize, align: usize) -> usize {
276 self.nursery.alloc(size, align)
277 }
278 pub fn promote(&mut self, size: usize, align: usize) -> usize {
280 self.promotions += 1;
281 self.stable.alloc(size, align)
282 }
283 pub fn minor_gc(&mut self) {
285 self.nursery.reset();
286 }
287 pub fn num_promotions(&self) -> usize {
289 self.promotions
290 }
291}
292#[allow(dead_code)]
294#[allow(missing_docs)]
295pub struct ArenaCheckpoint {
296 pub watermark: usize,
298}
299#[allow(dead_code)]
300impl ArenaCheckpoint {
301 pub fn create(arena: &LinearArena) -> Self {
303 Self {
304 watermark: arena.used(),
305 }
306 }
307 pub fn restore(self, arena: &mut LinearArena) {
309 arena.top = self.watermark;
310 }
311}
/// Microsecond-resolution timestamp (a thin wrapper over `u64`).
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Timestamp(u64);
#[allow(dead_code)]
impl Timestamp {
    /// Builds a timestamp from a microsecond count.
    pub const fn from_us(us: u64) -> Self {
        Self(us)
    }
    /// Raw microsecond count.
    pub fn as_us(self) -> u64 {
        self.0
    }
    /// Microseconds elapsed since `earlier`; saturates to 0 when `earlier`
    /// is actually later than `self`.
    pub fn elapsed_since(self, earlier: Timestamp) -> u64 {
        self.0.saturating_sub(earlier.0)
    }
}
331#[derive(Debug)]
336pub struct ScopedArena<T> {
337 inner: Arena<T>,
338}
339impl<T> ScopedArena<T> {
340 pub fn new() -> Self {
342 Self {
343 inner: Arena::new(),
344 }
345 }
346 pub fn alloc(&mut self, value: T) -> Idx<T> {
348 self.inner.alloc(value)
349 }
350 pub fn get(&self, idx: Idx<T>) -> &T {
352 self.inner.get(idx)
353 }
354 pub fn checkpoint(&self) -> u32 {
356 self.inner.len() as u32
357 }
358 pub fn rollback(&mut self, checkpoint: u32) {
362 self.inner.data.truncate(checkpoint as usize);
363 }
364 pub fn len(&self) -> usize {
366 self.inner.len()
367 }
368 pub fn is_empty(&self) -> bool {
370 self.inner.is_empty()
371 }
372}
373#[allow(dead_code)]
375pub struct LinearArena {
376 pub(crate) buf: Vec<u8>,
377 top: usize,
378 stats: ArenaStatsExt,
379}
380#[allow(dead_code)]
381impl LinearArena {
382 pub fn new(capacity: usize) -> Self {
384 Self {
385 buf: vec![0u8; capacity],
386 top: 0,
387 stats: ArenaStatsExt::new(),
388 }
389 }
390 pub fn alloc(&mut self, size: usize, align: usize) -> Option<usize> {
393 let aligned = (self.top + align - 1) & !(align - 1);
394 let end = aligned.checked_add(size)?;
395 if end > self.buf.len() {
396 return None;
397 }
398 let waste = aligned - self.top;
399 self.top = end;
400 self.stats.bytes_allocated += size;
401 self.stats.alloc_count += 1;
402 self.stats.wasted_bytes += waste;
403 Some(aligned)
404 }
405 pub fn reset(&mut self) {
407 self.top = 0;
408 self.stats = ArenaStatsExt::new();
409 }
410 pub fn used(&self) -> usize {
412 self.top
413 }
414 pub fn capacity(&self) -> usize {
416 self.buf.len()
417 }
418 pub fn stats(&self) -> &ArenaStatsExt {
420 &self.stats
421 }
422}
/// Length/capacity snapshot of an arena.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ArenaStats {
    pub len: usize,
    pub capacity: usize,
}
impl ArenaStats {
    /// Fraction of capacity in use, in `[0, 1]`; 0.0 for zero capacity.
    pub fn utilisation(&self) -> f64 {
        match self.capacity {
            0 => 0.0,
            cap => self.len as f64 / cap as f64,
        }
    }
    /// Remaining slots; saturates at 0 when `len` exceeds `capacity`.
    pub fn free(&self) -> usize {
        self.capacity.saturating_sub(self.len)
    }
}
/// Multimap of string annotations: each key accumulates values in order.
#[allow(dead_code)]
pub struct AnnotationTable {
    map: std::collections::HashMap<String, Vec<String>>,
}
#[allow(dead_code)]
impl AnnotationTable {
    /// Creates an empty table.
    pub fn new() -> Self {
        Self {
            map: std::collections::HashMap::new(),
        }
    }
    /// Appends `val` to the annotation list for `key`.
    pub fn annotate(&mut self, key: impl Into<String>, val: impl Into<String>) {
        self.map.entry(key.into()).or_default().push(val.into());
    }
    /// All values recorded for `key`, oldest first; empty if unknown.
    pub fn get_all(&self, key: &str) -> &[String] {
        match self.map.get(key) {
            Some(vals) => vals,
            None => &[],
        }
    }
    /// Number of distinct annotated keys.
    pub fn num_keys(&self) -> usize {
        self.map.len()
    }
    /// True if `key` has at least one annotation.
    pub fn has(&self, key: &str) -> bool {
        self.map.contains_key(key)
    }
}
/// Minimal bump allocator with explicit read/write access to its buffer.
#[derive(Debug)]
pub struct BumpArena {
    buf: Vec<u8>,
    pos: usize,
}
impl BumpArena {
    /// Creates an arena over `cap` zeroed bytes.
    pub fn with_capacity(cap: usize) -> Self {
        Self {
            buf: vec![0u8; cap],
            pos: 0,
        }
    }
    /// Reserves `size` bytes (unaligned) and returns their offset, or
    /// `None` when the arena is full.
    ///
    /// Bug fix: uses checked addition — previously `pos + size` could wrap
    /// around in release builds for a huge `size`, making the bounds test
    /// pass and corrupting the allocator state.
    pub fn alloc_bytes(&mut self, size: usize) -> Option<usize> {
        let end = self.pos.checked_add(size)?;
        if end > self.buf.len() {
            None
        } else {
            let offset = self.pos;
            self.pos = end;
            Some(offset)
        }
    }
    /// Copies `data` into the buffer at `offset`.
    ///
    /// # Panics
    /// Panics if `offset + data.len()` exceeds the buffer.
    pub fn write_at(&mut self, offset: usize, data: &[u8]) {
        self.buf[offset..offset + data.len()].copy_from_slice(data);
    }
    /// Borrows `len` bytes starting at `offset`.
    ///
    /// # Panics
    /// Panics if the range exceeds the buffer.
    pub fn read_at(&self, offset: usize, len: usize) -> &[u8] {
        &self.buf[offset..offset + len]
    }
    /// Forgets all allocations (buffer contents are left in place).
    pub fn reset(&mut self) {
        self.pos = 0;
    }
    /// Bytes handed out so far.
    pub fn used(&self) -> usize {
        self.pos
    }
    /// Total buffer size.
    pub fn capacity(&self) -> usize {
        self.buf.len()
    }
    /// Bytes still available.
    pub fn remaining(&self) -> usize {
        self.buf.len().saturating_sub(self.pos)
    }
}
529#[derive(Debug)]
533pub struct ArenaPool<T> {
534 pub(super) pool: Vec<Arena<T>>,
535}
536impl<T> ArenaPool<T> {
537 pub fn new() -> Self {
539 Self { pool: Vec::new() }
540 }
541 pub fn acquire(&mut self) -> Arena<T> {
543 self.pool.pop().unwrap_or_default()
544 }
545 pub fn release(&mut self, mut arena: Arena<T>) {
547 arena.clear();
548 self.pool.push(arena);
549 }
550 pub fn pool_size(&self) -> usize {
552 self.pool.len()
553 }
554}
/// Small LRU cache backed by parallel vectors plus a recency list.
///
/// `order` holds slot indices, most recently used first. Lookups and
/// updates are O(capacity), which is fine for the small caches this is
/// intended for.
#[allow(dead_code)]
pub struct SimpleLruCache<K: std::hash::Hash + Eq + Clone, V: Clone> {
    capacity: usize,
    map: std::collections::HashMap<K, usize>,
    keys: Vec<K>,
    vals: Vec<V>,
    order: Vec<usize>,
}
#[allow(dead_code)]
impl<K: std::hash::Hash + Eq + Clone, V: Clone> SimpleLruCache<K, V> {
    /// Creates a cache for at most `capacity` entries.
    ///
    /// # Panics
    /// Panics when `capacity` is zero.
    pub fn new(capacity: usize) -> Self {
        assert!(capacity > 0);
        Self {
            capacity,
            map: std::collections::HashMap::new(),
            keys: Vec::new(),
            vals: Vec::new(),
            order: Vec::new(),
        }
    }
    /// Marks `slot` as the most recently used entry.
    fn touch(&mut self, slot: usize) {
        self.order.retain(|&s| s != slot);
        self.order.insert(0, slot);
    }
    /// Inserts or updates `key`, evicting the least recently used entry
    /// when the cache is full.
    pub fn put(&mut self, key: K, val: V) {
        if let Some(&slot) = self.map.get(&key) {
            // Existing key: overwrite in place and refresh recency.
            self.vals[slot] = val;
            self.touch(slot);
        } else if self.keys.len() >= self.capacity {
            // Full: recycle the least recently used slot.
            let victim = *self
                .order
                .last()
                .expect("order list must be non-empty before eviction");
            self.map.remove(&self.keys[victim]);
            self.order.pop();
            self.keys[victim] = key.clone();
            self.vals[victim] = val;
            self.map.insert(key, victim);
            self.order.insert(0, victim);
        } else {
            // Room left: append a brand-new slot.
            let slot = self.keys.len();
            self.keys.push(key.clone());
            self.vals.push(val);
            self.map.insert(key, slot);
            self.order.insert(0, slot);
        }
    }
    /// Looks up `key`, refreshing its recency on a hit.
    pub fn get(&mut self, key: &K) -> Option<&V> {
        let slot = *self.map.get(key)?;
        self.touch(slot);
        Some(&self.vals[slot])
    }
    /// Number of cached entries.
    pub fn len(&self) -> usize {
        self.keys.len()
    }
    /// True when nothing is cached.
    pub fn is_empty(&self) -> bool {
        self.keys.is_empty()
    }
}
620#[allow(dead_code)]
622pub struct IdDispenser<T> {
623 next: u32,
624 _phantom: std::marker::PhantomData<T>,
625}
626#[allow(dead_code)]
627impl<T> IdDispenser<T> {
628 pub fn new() -> Self {
630 Self {
631 next: 0,
632 _phantom: std::marker::PhantomData,
633 }
634 }
635 #[allow(clippy::should_implement_trait)]
637 pub fn next(&mut self) -> TypedId<T> {
638 let id = TypedId::new(self.next);
639 self.next += 1;
640 id
641 }
642 pub fn count(&self) -> u32 {
644 self.next
645 }
646}
/// Bump arena over a byte buffer that doubles its backing storage instead
/// of failing when full.
#[allow(dead_code)]
pub struct GrowableArena {
    pub(crate) data: Vec<u8>,
    top: usize,
    count: usize,
}
#[allow(dead_code)]
impl GrowableArena {
    /// Creates an arena with at least 16 bytes of initial capacity.
    pub fn new(initial: usize) -> Self {
        Self {
            data: vec![0u8; initial.max(16)],
            top: 0,
            count: 0,
        }
    }
    /// Allocates `size` bytes at `align` (assumed to be a power of two),
    /// growing the buffer when needed, and returns the offset.
    pub fn alloc(&mut self, size: usize, align: usize) -> usize {
        let start = (self.top + align - 1) & !(align - 1);
        let end = start + size;
        if end > self.data.len() {
            // Double the buffer, or jump straight to `end` when doubling
            // is not enough for this request.
            let grown = (self.data.len() * 2).max(end);
            self.data.resize(grown, 0);
        }
        self.top = end;
        self.count += 1;
        start
    }
    /// Bytes consumed, including alignment padding.
    pub fn used(&self) -> usize {
        self.top
    }
    /// Number of allocations performed since the last reset.
    pub fn count(&self) -> usize {
        self.count
    }
    /// Forgets all allocations; capacity is retained.
    pub fn reset(&mut self) {
        self.top = 0;
        self.count = 0;
    }
}
/// Append-only arena of `T` values.
#[allow(dead_code)]
pub struct TypedArena<T> {
    items: Vec<T>,
}
#[allow(dead_code)]
impl<T> TypedArena<T> {
    /// Creates an empty arena.
    pub fn new() -> Self {
        Self { items: Vec::new() }
    }
    /// Stores `val` and returns a reference to its new home.
    pub fn alloc(&mut self, val: T) -> &T {
        self.items.push(val);
        self.items.last().expect("items list must be non-empty")
    }
    /// Number of stored values.
    pub fn len(&self) -> usize {
        self.items.len()
    }
    /// True when nothing is stored.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    /// Drops every stored value.
    pub fn clear(&mut self) {
        self.items.clear();
    }
}
/// Fill-once-ish container: `fill` panics on a second write, but `take`
/// empties the slot so it can be filled again.
#[allow(dead_code)]
pub struct Slot<T> {
    inner: Option<T>,
}
#[allow(dead_code)]
impl<T> Slot<T> {
    /// Creates an unfilled slot.
    pub fn empty() -> Self {
        Self { inner: None }
    }
    /// Stores `val`.
    ///
    /// # Panics
    /// Panics if the slot is already filled.
    pub fn fill(&mut self, val: T) {
        assert!(self.inner.is_none(), "Slot: already filled");
        self.inner = Some(val);
    }
    /// Borrows the contents, if present.
    pub fn get(&self) -> Option<&T> {
        self.inner.as_ref()
    }
    /// True once a value has been stored.
    pub fn is_filled(&self) -> bool {
        self.inner.is_some()
    }
    /// Moves the value out, leaving the slot empty.
    pub fn take(&mut self) -> Option<T> {
        self.inner.take()
    }
    /// Returns the contents, running `f` to fill the slot first if needed.
    pub fn get_or_fill_with(&mut self, f: impl FnOnce() -> T) -> &T {
        self.inner.get_or_insert_with(f)
    }
}
757#[derive(Clone, Copy)]
761pub struct IdxRange<T> {
762 pub start: Idx<T>,
764 pub end: Idx<T>,
766}
767impl<T> IdxRange<T> {
768 pub fn new(start: Idx<T>, end: Idx<T>) -> Self {
770 Self { start, end }
771 }
772 pub fn empty_at(idx: Idx<T>) -> Self {
774 let r = idx.raw;
775 Self {
776 start: Idx::new(r),
777 end: Idx::new(r),
778 }
779 }
780 pub fn len(self) -> usize {
782 (self.end.raw as usize).saturating_sub(self.start.raw as usize)
783 }
784 pub fn is_empty(self) -> bool {
786 self.start.raw >= self.end.raw
787 }
788 pub fn contains(self, idx: Idx<T>) -> bool {
790 idx.raw >= self.start.raw && idx.raw < self.end.raw
791 }
792 pub fn iter(self) -> impl Iterator<Item = Idx<T>> {
794 (self.start.raw..self.end.raw).map(Idx::new)
795 }
796}
/// One fixed-size chunk of a chained arena.
#[allow(dead_code)]
pub struct ArenaBlock {
    data: Vec<u8>,
    used: usize,
}
#[allow(dead_code)]
impl ArenaBlock {
    /// Creates a block of `size` zeroed bytes.
    pub fn new(size: usize) -> Self {
        Self {
            data: vec![0u8; size],
            used: 0,
        }
    }
    /// Attempts to carve out `bytes` at `align` (assumed power of two);
    /// returns the offset inside this block, or `None` when it won't fit.
    pub fn try_alloc(&mut self, bytes: usize, align: usize) -> Option<usize> {
        let start = (self.used + align - 1) & !(align - 1);
        let end = start.checked_add(bytes)?;
        if end <= self.data.len() {
            self.used = end;
            Some(start)
        } else {
            None
        }
    }
    /// Bytes still free in this block.
    pub fn remaining(&self) -> usize {
        self.data.len() - self.used
    }
    /// Total size of this block.
    pub fn block_size(&self) -> usize {
        self.data.len()
    }
}
831#[derive(Debug)]
836pub struct InterningArena<T: PartialEq + Clone> {
837 inner: Arena<T>,
838}
839impl<T: PartialEq + Clone> InterningArena<T> {
840 pub fn new() -> Self {
842 Self {
843 inner: Arena::new(),
844 }
845 }
846 pub fn intern(&mut self, value: T) -> Idx<T> {
851 for (i, v) in self.inner.data.iter().enumerate() {
852 if *v == value {
853 return Idx::new(i as u32);
854 }
855 }
856 self.inner.alloc(value)
857 }
858 pub fn get(&self, idx: Idx<T>) -> &T {
860 self.inner.get(idx)
861 }
862 pub fn len(&self) -> usize {
864 self.inner.len()
865 }
866 pub fn is_empty(&self) -> bool {
868 self.inner.is_empty()
869 }
870 pub fn contains(&self, value: &T) -> bool {
872 self.inner.data.iter().any(|v| v == value)
873 }
874 pub fn stats(&self) -> ArenaStats {
876 self.inner.stats()
877 }
878}
/// Wall-clock timer plus iteration counter for measuring loop throughput.
#[allow(dead_code)]
pub struct LoopClock {
    start: std::time::Instant,
    iters: u64,
}
#[allow(dead_code)]
impl LoopClock {
    /// Starts the clock with zero iterations recorded.
    pub fn start() -> Self {
        Self {
            start: std::time::Instant::now(),
            iters: 0,
        }
    }
    /// Records one loop iteration.
    pub fn tick(&mut self) {
        self.iters += 1;
    }
    /// Microseconds elapsed since the clock started.
    pub fn elapsed_us(&self) -> f64 {
        self.start.elapsed().as_secs_f64() * 1e6
    }
    /// Mean microseconds per recorded iteration; 0.0 before any tick.
    pub fn avg_us_per_iter(&self) -> f64 {
        if self.iters == 0 {
            0.0
        } else {
            self.elapsed_us() / self.iters as f64
        }
    }
    /// Number of iterations recorded.
    pub fn iters(&self) -> u64 {
        self.iters
    }
}
/// One-value memo: computes lazily, caches until invalidated.
#[allow(dead_code)]
pub struct MemoSlot<T: Clone> {
    cached: Option<T>,
}
#[allow(dead_code)]
impl<T: Clone> MemoSlot<T> {
    /// Creates an empty memo.
    pub fn new() -> Self {
        Self { cached: None }
    }
    /// Returns the cached value, computing it with `f` on first use.
    pub fn get_or_compute(&mut self, f: impl FnOnce() -> T) -> &T {
        self.cached.get_or_insert_with(f)
    }
    /// Drops the cached value so the next access recomputes.
    pub fn invalidate(&mut self) {
        self.cached = None;
    }
    /// True while a value is cached.
    pub fn is_cached(&self) -> bool {
        self.cached.is_some()
    }
}
943#[derive(Debug)]
948pub struct Arena<T> {
949 data: Vec<T>,
950}
951impl<T> Arena<T> {
952 #[inline]
954 pub fn new() -> Self {
955 Self { data: Vec::new() }
956 }
957 #[inline]
959 pub fn with_capacity(capacity: usize) -> Self {
960 Self {
961 data: Vec::with_capacity(capacity),
962 }
963 }
964 #[inline]
966 pub fn alloc(&mut self, value: T) -> Idx<T> {
967 let idx = self.data.len();
968 assert!(idx < u32::MAX as usize, "arena overflow");
969 self.data.push(value);
970 Idx::new(idx as u32)
971 }
972 pub fn alloc_many(&mut self, values: impl IntoIterator<Item = T>) -> IdxRange<T> {
974 let start = Idx::new(self.data.len() as u32);
975 self.data.extend(values);
976 let end = Idx::new(self.data.len() as u32);
977 IdxRange::new(start, end)
978 }
979 #[inline]
984 pub fn get(&self, idx: Idx<T>) -> &T {
985 &self.data[idx.raw as usize]
986 }
987 #[inline]
989 pub fn get_mut(&mut self, idx: Idx<T>) -> &mut T {
990 &mut self.data[idx.raw as usize]
991 }
992 pub fn get_range(&self, range: IdxRange<T>) -> &[T] {
994 &self.data[range.start.to_usize()..range.end.to_usize()]
995 }
996 #[inline]
998 pub fn len(&self) -> usize {
999 self.data.len()
1000 }
1001 #[inline]
1003 pub fn is_empty(&self) -> bool {
1004 self.data.is_empty()
1005 }
1006 pub fn next_idx(&self) -> Idx<T> {
1008 Idx::new(self.data.len() as u32)
1009 }
1010 pub fn iter_indexed(&self) -> impl Iterator<Item = (Idx<T>, &T)> {
1012 self.data
1013 .iter()
1014 .enumerate()
1015 .map(|(i, v)| (Idx::new(i as u32), v))
1016 }
1017 pub fn iter(&self) -> impl Iterator<Item = &T> {
1019 self.data.iter()
1020 }
1021 pub fn stats(&self) -> ArenaStats {
1023 ArenaStats {
1024 len: self.data.len(),
1025 capacity: self.data.capacity(),
1026 }
1027 }
1028 pub fn shrink_to_fit(&mut self) {
1030 self.data.shrink_to_fit();
1031 }
1032 pub fn clear(&mut self) {
1034 self.data.clear();
1035 }
1036}
/// Bidirectional one-to-one map between `A` and `B`.
#[allow(dead_code)]
pub struct BiMap<A: std::hash::Hash + Eq + Clone, B: std::hash::Hash + Eq + Clone> {
    forward: std::collections::HashMap<A, B>,
    backward: std::collections::HashMap<B, A>,
}
#[allow(dead_code)]
impl<A: std::hash::Hash + Eq + Clone, B: std::hash::Hash + Eq + Clone> BiMap<A, B> {
    /// Creates an empty bimap.
    pub fn new() -> Self {
        Self {
            forward: std::collections::HashMap::new(),
            backward: std::collections::HashMap::new(),
        }
    }
    /// Associates `a` with `b`, dissolving any pairing that involved
    /// either side.
    ///
    /// Bug fix: re-inserting previously left stale entries behind (e.g.
    /// after `insert(a, b1); insert(a, b2)` the backward map still said
    /// `b1 -> a`), breaking the one-to-one invariant between the two maps.
    pub fn insert(&mut self, a: A, b: B) {
        if let Some(old_b) = self.forward.remove(&a) {
            self.backward.remove(&old_b);
        }
        if let Some(old_a) = self.backward.remove(&b) {
            self.forward.remove(&old_a);
        }
        self.forward.insert(a.clone(), b.clone());
        self.backward.insert(b, a);
    }
    /// The `B` paired with `a`, if any.
    pub fn get_b(&self, a: &A) -> Option<&B> {
        self.forward.get(a)
    }
    /// The `A` paired with `b`, if any.
    pub fn get_a(&self, b: &B) -> Option<&A> {
        self.backward.get(b)
    }
    /// Number of pairs stored.
    pub fn len(&self) -> usize {
        self.forward.len()
    }
    /// True when no pair is stored.
    pub fn is_empty(&self) -> bool {
        self.forward.is_empty()
    }
}
1074#[allow(dead_code)]
1076pub struct ArenaString {
1077 pub(crate) offset: usize,
1078 len: usize,
1079}
1080#[allow(dead_code)]
1081impl ArenaString {
1082 pub fn store(arena: &mut LinearArena, s: &str) -> Option<Self> {
1084 let bytes = s.as_bytes();
1085 let offset = arena.alloc(bytes.len() + 1, 1)?;
1086 let start = offset;
1087 for (i, &b) in bytes.iter().enumerate() {
1088 arena.buf[start + i] = b;
1089 }
1090 arena.buf[start + bytes.len()] = 0;
1091 Some(Self {
1092 offset,
1093 len: bytes.len(),
1094 })
1095 }
1096 pub fn len(&self) -> usize {
1098 self.len
1099 }
1100 pub fn is_empty(&self) -> bool {
1102 self.len == 0
1103 }
1104}
/// Set of `i64` values stored as sorted, disjoint closed intervals.
///
/// Overlapping or adjacent intervals are coalesced on insertion.
#[allow(dead_code)]
pub struct IntervalSet {
    intervals: Vec<(i64, i64)>,
}
#[allow(dead_code)]
impl IntervalSet {
    /// Creates an empty set.
    pub fn new() -> Self {
        Self {
            intervals: Vec::new(),
        }
    }
    /// Inserts the closed interval `[lo, hi]`, merging with any interval
    /// it touches or overlaps. Inverted ranges (`lo > hi`) are ignored.
    pub fn add(&mut self, lo: i64, hi: i64) {
        if lo > hi {
            return;
        }
        let mut merged_lo = lo;
        let mut merged_hi = hi;
        let mut pos = 0;
        // Walk the sorted list: skip intervals ending strictly before ours
        // (with no adjacency), stop at the first interval starting strictly
        // after ours, and absorb everything in between. `remove` shifts the
        // list left, so `pos` is not advanced after a merge.
        while pos < self.intervals.len() {
            let (cur_lo, cur_hi) = self.intervals[pos];
            if cur_hi < merged_lo - 1 {
                // Entirely to the left, not even adjacent.
                pos += 1;
                continue;
            }
            if cur_lo > merged_hi + 1 {
                // Entirely to the right; insertion point found.
                break;
            }
            // Touches or overlaps: widen the merged interval and drop it.
            merged_lo = merged_lo.min(cur_lo);
            merged_hi = merged_hi.max(cur_hi);
            self.intervals.remove(pos);
        }
        self.intervals.insert(pos, (merged_lo, merged_hi));
    }
    /// True if `x` falls inside any stored interval.
    pub fn contains(&self, x: i64) -> bool {
        self.intervals.iter().any(|&(lo, hi)| (lo..=hi).contains(&x))
    }
    /// Number of disjoint intervals currently stored.
    pub fn num_intervals(&self) -> usize {
        self.intervals.len()
    }
    /// Total count of integers covered by the set.
    pub fn cardinality(&self) -> i64 {
        self.intervals.iter().map(|&(lo, hi)| hi - lo + 1).sum()
    }
}
1154#[allow(dead_code)]
1156pub struct MemoryRegionRegistry {
1157 regions: Vec<MemoryRegion>,
1158}
1159#[allow(dead_code)]
1160impl MemoryRegionRegistry {
1161 pub fn new() -> Self {
1163 Self {
1164 regions: Vec::new(),
1165 }
1166 }
1167 pub fn add(&mut self, region: MemoryRegion) {
1169 self.regions.push(region);
1170 }
1171 pub fn find(&self, addr: usize) -> Option<&MemoryRegion> {
1173 self.regions.iter().find(|r| r.contains(addr))
1174 }
1175 pub fn len(&self) -> usize {
1177 self.regions.len()
1178 }
1179 pub fn is_empty(&self) -> bool {
1181 self.regions.is_empty()
1182 }
1183}
1184#[derive(Debug, Default)]
1189pub struct DoubleArena<A, B> {
1190 pub first: Arena<A>,
1192 pub second: Arena<B>,
1194}
1195impl<A, B> DoubleArena<A, B> {
1196 pub fn new() -> Self {
1198 Self {
1199 first: Arena::new(),
1200 second: Arena::new(),
1201 }
1202 }
1203 pub fn alloc_pair(&mut self, a: A, b: B) -> (Idx<A>, Idx<B>) {
1208 let ia = self.first.alloc(a);
1209 let ib = self.second.alloc(b);
1210 (ia, ib)
1211 }
1212 pub fn total_len(&self) -> usize {
1214 self.first.len() + self.second.len()
1215 }
1216}
/// Labelled address range `[base, base + size)` with an active flag.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct MemoryRegion {
    pub label: String,
    pub base: usize,
    pub size: usize,
    pub active: bool,
}
#[allow(dead_code)]
impl MemoryRegion {
    /// Creates an inactive region covering `[base, base + size)`.
    pub fn new(label: impl Into<String>, base: usize, size: usize) -> Self {
        Self {
            label: label.into(),
            base,
            size,
            active: false,
        }
    }
    /// Marks the region active.
    pub fn activate(&mut self) {
        self.active = true;
    }
    /// Marks the region inactive.
    pub fn deactivate(&mut self) {
        self.active = false;
    }
    /// One past the last address in the region.
    pub fn end(&self) -> usize {
        self.base + self.size
    }
    /// True if `addr` lies within `[base, end())`.
    pub fn contains(&self, addr: usize) -> bool {
        (self.base..self.end()).contains(&addr)
    }
}
/// Fixed-capacity bit set packed into 64-bit words.
///
/// Out-of-range `set`/`clear` calls are silently ignored; `get` reports
/// out-of-range bits as unset.
#[allow(dead_code)]
pub struct SparseBitSet {
    words: Vec<u64>,
}
#[allow(dead_code)]
impl SparseBitSet {
    /// Creates a set able to hold bits `0..capacity` (rounded up to a
    /// whole word).
    pub fn new(capacity: usize) -> Self {
        Self {
            words: vec![0u64; (capacity + 63) / 64],
        }
    }
    /// Sets bit `i` (no-op when out of range).
    pub fn set(&mut self, i: usize) {
        if let Some(word) = self.words.get_mut(i / 64) {
            *word |= 1u64 << (i % 64);
        }
    }
    /// Clears bit `i` (no-op when out of range).
    pub fn clear(&mut self, i: usize) {
        if let Some(word) = self.words.get_mut(i / 64) {
            *word &= !(1u64 << (i % 64));
        }
    }
    /// True if bit `i` is set.
    pub fn get(&self, i: usize) -> bool {
        self.words
            .get(i / 64)
            .is_some_and(|w| w & (1u64 << (i % 64)) != 0)
    }
    /// Number of set bits.
    pub fn count_ones(&self) -> u32 {
        self.words.iter().map(|w| w.count_ones()).sum()
    }
    /// Bitwise OR of the two sets, sized to the larger one.
    pub fn union(&self, other: &SparseBitSet) -> SparseBitSet {
        let longest = self.words.len().max(other.words.len());
        let mut combined = SparseBitSet {
            words: vec![0u64; longest],
        };
        for (dst, &src) in combined.words.iter_mut().zip(&self.words) {
            *dst |= src;
        }
        for (dst, &src) in combined.words.iter_mut().zip(&other.words) {
            *dst |= src;
        }
        combined
    }
}
/// FIFO work queue (thin wrapper over `VecDeque`).
#[allow(dead_code)]
pub struct WorkQueue<T> {
    items: std::collections::VecDeque<T>,
}
#[allow(dead_code)]
impl<T> WorkQueue<T> {
    /// Creates an empty queue.
    pub fn new() -> Self {
        Self {
            items: std::collections::VecDeque::new(),
        }
    }
    /// Adds `item` at the back of the queue.
    pub fn enqueue(&mut self, item: T) {
        self.items.push_back(item);
    }
    /// Removes and returns the oldest item, if any.
    pub fn dequeue(&mut self) -> Option<T> {
        self.items.pop_front()
    }
    /// True when the queue holds nothing.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    /// Number of queued items.
    pub fn len(&self) -> usize {
        self.items.len()
    }
}
/// Occurrence counter over hashable items.
#[allow(dead_code)]
pub struct FrequencyTable<T: std::hash::Hash + Eq + Clone> {
    counts: std::collections::HashMap<T, u64>,
}
#[allow(dead_code)]
impl<T: std::hash::Hash + Eq + Clone> FrequencyTable<T> {
    /// Creates an empty table.
    pub fn new() -> Self {
        Self {
            counts: std::collections::HashMap::new(),
        }
    }
    /// Counts one occurrence of `item`.
    pub fn record(&mut self, item: T) {
        *self.counts.entry(item).or_default() += 1;
    }
    /// Times `item` has been recorded (0 when never seen).
    pub fn freq(&self, item: &T) -> u64 {
        self.counts.get(item).copied().unwrap_or(0)
    }
    /// Item with the highest count, together with that count.
    ///
    /// Ties are broken arbitrarily (hash-map iteration order).
    pub fn most_frequent(&self) -> Option<(&T, u64)> {
        self.counts
            .iter()
            .map(|(item, &n)| (item, n))
            .max_by_key(|&(_, n)| n)
    }
    /// Total number of recorded occurrences.
    pub fn total(&self) -> u64 {
        self.counts.values().sum()
    }
    /// Number of distinct items seen.
    pub fn distinct(&self) -> usize {
        self.counts.len()
    }
}
1380#[allow(dead_code)]
1382#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
1383pub struct TypedId<T> {
1384 pub(super) id: u32,
1385 _phantom: std::marker::PhantomData<T>,
1386}
1387#[allow(dead_code)]
1388impl<T> TypedId<T> {
1389 pub const fn new(id: u32) -> Self {
1391 Self {
1392 id,
1393 _phantom: std::marker::PhantomData,
1394 }
1395 }
1396 pub fn raw(&self) -> u32 {
1398 self.id
1399 }
1400}
1401#[allow(dead_code)]
1403pub struct ScopedArenaExt {
1404 pub(crate) inner: LinearArena,
1405 watermarks: Vec<usize>,
1406}
1407#[allow(dead_code)]
1408impl ScopedArenaExt {
1409 pub fn new(capacity: usize) -> Self {
1411 Self {
1412 inner: LinearArena::new(capacity),
1413 watermarks: Vec::new(),
1414 }
1415 }
1416 pub fn push_scope(&mut self) {
1418 self.watermarks.push(self.inner.used());
1419 }
1420 pub fn pop_scope(&mut self) {
1423 let wm = self.watermarks.pop().expect("ScopedArena: no active scope");
1424 self.inner.top = wm;
1425 }
1426 pub fn alloc(&mut self, size: usize, align: usize) -> Option<usize> {
1428 self.inner.alloc(size, align)
1429 }
1430 pub fn scope_depth(&self) -> usize {
1432 self.watermarks.len()
1433 }
1434}
/// LIFO work stack (thin wrapper over `Vec`).
#[allow(dead_code)]
pub struct WorkStack<T> {
    items: Vec<T>,
}
#[allow(dead_code)]
impl<T> WorkStack<T> {
    /// Creates an empty stack.
    pub fn new() -> Self {
        Self { items: Vec::new() }
    }
    /// Pushes `item` on top.
    pub fn push(&mut self, item: T) {
        self.items.push(item);
    }
    /// Pops the most recently pushed item, if any.
    pub fn pop(&mut self) -> Option<T> {
        self.items.pop()
    }
    /// True when the stack is empty.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    /// Number of stacked items.
    pub fn len(&self) -> usize {
        self.items.len()
    }
}
1463#[allow(dead_code)]
1467pub struct ArenaVec<T: Copy> {
1468 base: usize,
1469 length: usize,
1470 _t: std::marker::PhantomData<T>,
1471}
1472#[allow(dead_code)]
1473impl<T: Copy> ArenaVec<T> {
1474 pub fn new(arena: &mut LinearArena, capacity: usize) -> Option<Self> {
1476 let base = arena.alloc(
1477 capacity * std::mem::size_of::<T>(),
1478 std::mem::align_of::<T>(),
1479 )?;
1480 Some(Self {
1481 base,
1482 length: 0,
1483 _t: std::marker::PhantomData,
1484 })
1485 }
1486 pub fn len(&self) -> usize {
1488 self.length
1489 }
1490 pub fn is_empty(&self) -> bool {
1492 self.length == 0
1493 }
1494}
/// Cumulative allocation counters for byte arenas.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ArenaStatsExt {
    pub bytes_allocated: usize,
    pub alloc_count: usize,
    pub chunk_count: usize,
    pub wasted_bytes: usize,
}
#[allow(dead_code)]
impl ArenaStatsExt {
    /// All counters start at zero.
    pub fn new() -> Self {
        Self {
            bytes_allocated: 0,
            alloc_count: 0,
            chunk_count: 0,
            wasted_bytes: 0,
        }
    }
    /// Mean payload bytes per allocation; 0.0 before any allocation.
    pub fn avg_alloc_size(&self) -> f64 {
        if self.alloc_count == 0 {
            0.0
        } else {
            self.bytes_allocated as f64 / self.alloc_count as f64
        }
    }
    /// Share of total consumed bytes lost to alignment padding, in
    /// `[0, 1]`; 0.0 when nothing has been consumed.
    pub fn fragmentation(&self) -> f64 {
        let footprint = self.bytes_allocated + self.wasted_bytes;
        if footprint == 0 {
            0.0
        } else {
            self.wasted_bytes as f64 / footprint as f64
        }
    }
}
/// Fixed-size slot allocator: `capacity` slots of `slot_size` bytes with a
/// free list.
#[allow(dead_code)]
pub struct PoolArena {
    slot_size: usize,
    capacity: usize,
    free_list: Vec<usize>,
    data: Vec<u8>,
}
#[allow(dead_code)]
impl PoolArena {
    /// Creates a pool of `capacity` zeroed slots, each `slot_size` bytes.
    pub fn new(slot_size: usize, capacity: usize) -> Self {
        Self {
            slot_size,
            capacity,
            free_list: (0..capacity).collect(),
            data: vec![0u8; slot_size * capacity],
        }
    }
    /// Takes a free slot index, or `None` when the pool is exhausted.
    pub fn alloc_slot(&mut self) -> Option<usize> {
        self.free_list.pop()
    }
    /// Returns `idx` to the free list.
    ///
    /// Out-of-range indices are ignored, and — bug fix — so are slots that
    /// are already free: previously a double free pushed the index twice,
    /// letting `alloc_slot` hand the same slot out to two callers. The
    /// `contains` scan is O(capacity), acceptable for the small pools this
    /// type targets.
    pub fn free_slot(&mut self, idx: usize) {
        if idx < self.capacity && !self.free_list.contains(&idx) {
            self.free_list.push(idx);
        }
    }
    /// Number of slots currently free.
    pub fn available(&self) -> usize {
        self.free_list.len()
    }
    /// Total number of slots.
    pub fn capacity(&self) -> usize {
        self.capacity
    }
    /// Size of each slot in bytes.
    pub fn slot_size(&self) -> usize {
        self.slot_size
    }
}
1579#[derive(Debug)]
1583pub struct SlabArena<T> {
1584 data: Vec<Option<T>>,
1585 free_list: Vec<u32>,
1586}
1587impl<T> SlabArena<T> {
1588 pub fn new() -> Self {
1590 Self {
1591 data: Vec::new(),
1592 free_list: Vec::new(),
1593 }
1594 }
1595 pub fn with_capacity(cap: usize) -> Self {
1597 Self {
1598 data: Vec::with_capacity(cap),
1599 free_list: Vec::new(),
1600 }
1601 }
1602 pub fn alloc(&mut self, value: T) -> Idx<T> {
1604 if let Some(idx) = self.free_list.pop() {
1605 self.data[idx as usize] = Some(value);
1606 Idx::new(idx)
1607 } else {
1608 let idx = self.data.len() as u32;
1609 assert!(idx < u32::MAX, "slab arena overflow");
1610 self.data.push(Some(value));
1611 Idx::new(idx)
1612 }
1613 }
1614 pub fn free(&mut self, idx: Idx<T>) -> Option<T> {
1618 let slot = self.data.get_mut(idx.raw as usize)?;
1619 let value = slot.take()?;
1620 self.free_list.push(idx.raw);
1621 Some(value)
1622 }
1623 pub fn get(&self, idx: Idx<T>) -> Option<&T> {
1627 self.data.get(idx.raw as usize)?.as_ref()
1628 }
1629 pub fn get_mut(&mut self, idx: Idx<T>) -> Option<&mut T> {
1631 self.data.get_mut(idx.raw as usize)?.as_mut()
1632 }
1633 pub fn len(&self) -> usize {
1635 self.data.iter().filter(|s| s.is_some()).count()
1636 }
1637 pub fn capacity_slots(&self) -> usize {
1639 self.data.len()
1640 }
1641 pub fn is_empty(&self) -> bool {
1643 self.len() == 0
1644 }
1645 pub fn free_count(&self) -> usize {
1647 self.free_list.len()
1648 }
1649 pub fn iter_live(&self) -> impl Iterator<Item = (Idx<T>, &T)> {
1651 self.data
1652 .iter()
1653 .enumerate()
1654 .filter_map(|(i, slot)| slot.as_ref().map(|v| (Idx::new(i as u32), v)))
1655 }
1656}
1657#[derive(Clone, Copy)]
1662pub struct Idx<T> {
1663 pub(super) raw: u32,
1664 _marker: PhantomData<T>,
1665}
1666impl<T> Idx<T> {
1667 #[inline]
1672 pub(crate) fn new(raw: u32) -> Self {
1673 Self {
1674 raw,
1675 _marker: PhantomData,
1676 }
1677 }
1678 #[inline]
1680 pub fn raw(&self) -> u32 {
1681 self.raw
1682 }
1683 pub fn cast<U>(self) -> Idx<U> {
1688 Idx::new(self.raw)
1689 }
1690 pub fn is_first(self) -> bool {
1692 self.raw == 0
1693 }
1694 pub fn next(self) -> Self {
1696 Self::new(self.raw + 1)
1697 }
1698 pub fn to_usize(self) -> usize {
1700 self.raw as usize
1701 }
1702}
/// Named event counters keyed by string.
#[allow(dead_code)]
pub struct EventCounter {
    counts: std::collections::HashMap<String, u64>,
}
#[allow(dead_code)]
impl EventCounter {
    /// Creates a counter set with no events recorded.
    pub fn new() -> Self {
        Self {
            counts: std::collections::HashMap::new(),
        }
    }
    /// Increments `event` by one.
    pub fn inc(&mut self, event: &str) {
        self.add(event, 1);
    }
    /// Increments `event` by `n`.
    pub fn add(&mut self, event: &str, n: u64) {
        *self.counts.entry(event.to_string()).or_default() += n;
    }
    /// Current count for `event` (0 when never recorded).
    pub fn get(&self, event: &str) -> u64 {
        self.counts.get(event).copied().unwrap_or(0)
    }
    /// Sum over all events.
    pub fn total(&self) -> u64 {
        self.counts.values().sum()
    }
    /// Clears every counter.
    pub fn reset(&mut self) {
        self.counts.clear();
    }
    /// Names of all events seen so far (unspecified order).
    pub fn event_names(&self) -> Vec<&str> {
        self.counts.keys().map(String::as_str).collect()
    }
}