//! aver_memory — arena.rs

1use super::*;
2
impl<T: ArenaTypes> Arena<T> {
    /// Construct an empty arena, pre-sizing the runtime heap spaces so the
    /// first allocations don't immediately reallocate.
    pub fn new() -> Self {
        Self {
            // Heap spaces — young gets the largest warm capacity since it is
            // the default allocation target.
            young_entries: Vec::with_capacity(256),
            yard_entries: Vec::with_capacity(64),
            handoff_entries: Vec::with_capacity(64),
            stable_entries: Vec::with_capacity(64),
            // Reusable scratch buffers, lazily grown on first use.
            scratch_young: Vec::new(),
            scratch_yard: Vec::new(),
            scratch_handoff: Vec::new(),
            scratch_stable: Vec::new(),
            // Allocation starts in the young space with zeroed peak stats.
            alloc_space: AllocSpace::Young,
            peak_usage: ArenaUsage::default(),
            // Type / symbol registries start empty.
            type_names: Vec::new(),
            type_field_names: Vec::new(),
            type_variant_names: Vec::new(),
            type_variant_ctor_ids: Vec::new(),
            ctor_to_type_variant: Vec::new(),
            symbol_entries: Vec::new(),
        }
    }
24
25    /// Create a fresh Arena with only the static context (symbols, type metadata,
26    /// stable constants) from this Arena. Dynamic runtime entries are empty.
27    /// Used for independent product threads: each gets a clean Arena with just the
28    /// compile-time context needed to execute functions and builtins.
29    pub fn clone_static(&self) -> Self {
30        Arena {
31            young_entries: Vec::with_capacity(64),
32            yard_entries: Vec::new(),
33            handoff_entries: Vec::new(),
34            stable_entries: self.stable_entries.clone(),
35            scratch_young: Vec::new(),
36            scratch_yard: Vec::new(),
37            scratch_handoff: Vec::new(),
38            scratch_stable: Vec::new(),
39            peak_usage: ArenaUsage::default(),
40            alloc_space: AllocSpace::Young,
41            type_names: self.type_names.clone(),
42            type_field_names: self.type_field_names.clone(),
43            type_variant_names: self.type_variant_names.clone(),
44            type_variant_ctor_ids: self.type_variant_ctor_ids.clone(),
45            ctor_to_type_variant: self.ctor_to_type_variant.clone(),
46            symbol_entries: self.symbol_entries.clone(),
47        }
48    }
49
    /// Deep-import a NanValue from `source` arena into `self`.
    /// Inline values (int, float, bool, unit, none, empty containers) are returned as-is.
    /// Heap-referenced values are recursively copied into `self` with new indices.
    ///
    /// NOTE(review): recursion depth tracks value nesting depth — a
    /// pathologically deep structure could overflow the stack; confirm
    /// callers bound nesting.
    pub fn deep_import(&mut self, value: NanValue, source: &Arena<T>) -> NanValue {
        // Not NaN-boxed = plain float, return as-is
        if !value.is_nan_boxed() {
            return value;
        }
        // Check if it has a heap index — if not, it's inline
        let heap_idx = match value.heap_index() {
            Some(idx) => idx,
            None => return value, // inline int, bool, unit, none, empty list/map/etc
        };

        // Clone the source entry up front so we hold no borrow of `source`'s
        // slot while recursing (recursion may push into `self`).
        let entry = source.get(heap_idx).clone();
        match entry {
            // Boxed big int: re-box in `self` (new_int decides the encoding).
            ArenaEntry::Int(i) => NanValue::new_int(i, self),
            ArenaEntry::String(s) => {
                // Rc payload is cloned into this arena as a fresh entry.
                let idx = self.push(ArenaEntry::String(s));
                NanValue::new_string(idx)
            }
            ArenaEntry::Tuple(items) => {
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push_tuple(imported);
                NanValue::new_tuple(idx)
            }
            ArenaEntry::List(_) => {
                // Flatten list and re-import as a fresh flat list
                let flat = source.list_to_vec_value(value);
                let imported: Vec<NanValue> =
                    flat.iter().map(|v| self.deep_import(*v, source)).collect();
                if imported.is_empty() {
                    // Empty lists have an immediate (heap-free) encoding.
                    NanValue::EMPTY_LIST
                } else {
                    let rc_items = Rc::new(imported);
                    let idx = self.push(ArenaEntry::List(ArenaList::Flat {
                        items: rc_items,
                        start: 0,
                    }));
                    NanValue::new_list(idx)
                }
            }
            ArenaEntry::Map(map) => {
                // Rebuild the map entry-by-entry so keys and values are both
                // deep-imported; hashes are reused as-is from the source map.
                let mut new_map = T::Map::new();
                for (hash, (k, v)) in map.iter() {
                    let ik = self.deep_import(*k, source);
                    let iv = self.deep_import(*v, source);
                    new_map = new_map.insert(*hash, (ik, iv));
                }
                let idx = self.push(ArenaEntry::Map(new_map));
                NanValue::new_map(idx)
            }
            ArenaEntry::Vector(items) => {
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push(ArenaEntry::Vector(imported));
                NanValue::new_vector(idx)
            }
            ArenaEntry::Record { type_id, fields } => {
                // type_id is copied verbatim — assumes `self` shares the same
                // type registry as `source` (e.g. via clone_static).
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Record {
                    type_id,
                    fields: imported,
                });
                NanValue::new_record(idx)
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                fields,
            } => {
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields: imported,
                });
                NanValue::new_variant(idx)
            }
            ArenaEntry::Boxed(inner) => {
                let imported = self.deep_import(inner, source);
                let idx = self.push(ArenaEntry::Boxed(imported));
                // Preserve the original tag while pointing at the new slot.
                NanValue::encode(value.tag(), ARENA_REF_BIT | (idx as u64))
            }
            // Fn/Builtin/Namespace — should not appear in independent product results
            ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => value,
        }
    }
145
146    #[inline]
147    pub fn push(&mut self, entry: ArenaEntry<T>) -> u32 {
148        match &entry {
149            ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => {}
150            _ => {
151                return match self.alloc_space {
152                    AllocSpace::Young => {
153                        let idx = self.young_entries.len() as u32;
154                        self.young_entries.push(entry);
155                        self.note_peak_usage();
156                        Self::encode_index(HeapSpace::Young, idx)
157                    }
158                    AllocSpace::Yard => {
159                        let idx = self.yard_entries.len() as u32;
160                        self.yard_entries.push(entry);
161                        self.note_peak_usage();
162                        Self::encode_index(HeapSpace::Yard, idx)
163                    }
164                    AllocSpace::Handoff => {
165                        let idx = self.handoff_entries.len() as u32;
166                        self.handoff_entries.push(entry);
167                        self.note_peak_usage();
168                        Self::encode_index(HeapSpace::Handoff, idx)
169                    }
170                };
171            }
172        }
173        match entry {
174            ArenaEntry::Fn(f) => self.push_symbol(ArenaSymbol::Fn(f)),
175            ArenaEntry::Builtin(name) => self.push_symbol(ArenaSymbol::Builtin(name)),
176            ArenaEntry::Namespace { name, members } => {
177                self.push_symbol(ArenaSymbol::Namespace { name, members })
178            }
179            _ => unreachable!("non-symbol entry already returned above"),
180        }
181    }
182
183    #[inline]
184    pub fn push_symbol(&mut self, symbol: ArenaSymbol<T>) -> u32 {
185        let idx = self.symbol_entries.len() as u32;
186        self.symbol_entries.push(symbol);
187        idx
188    }
189
    /// Resolve a space-tagged index to a shared reference to its entry.
    /// Panics (index out of bounds) if the raw index is stale for that space.
    #[inline]
    pub fn get(&self, index: u32) -> &ArenaEntry<T> {
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => &self.young_entries[raw_index as usize],
            HeapSpace::Yard => &self.yard_entries[raw_index as usize],
            HeapSpace::Handoff => &self.handoff_entries[raw_index as usize],
            HeapSpace::Stable => &self.stable_entries[raw_index as usize],
        }
    }

    /// Resolve a space-tagged index to a mutable reference to its entry.
    /// Panics (index out of bounds) if the raw index is stale for that space.
    #[inline]
    pub fn get_mut(&mut self, index: u32) -> &mut ArenaEntry<T> {
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => &mut self.young_entries[raw_index as usize],
            HeapSpace::Yard => &mut self.yard_entries[raw_index as usize],
            HeapSpace::Handoff => &mut self.handoff_entries[raw_index as usize],
            HeapSpace::Stable => &mut self.stable_entries[raw_index as usize],
        }
    }
211
212    #[inline]
213    pub(crate) fn encode_index(space: HeapSpace, index: u32) -> u32 {
214        ((space as u32) << HEAP_SPACE_SHIFT) | index
215    }
216
217    #[inline]
218    pub(crate) fn encode_yard_index(index: u32) -> u32 {
219        Self::encode_index(HeapSpace::Yard, index)
220    }
221
222    #[inline]
223    pub(crate) fn encode_stable_index(index: u32) -> u32 {
224        Self::encode_index(HeapSpace::Stable, index)
225    }
226
227    #[inline]
228    pub(crate) fn encode_handoff_index(index: u32) -> u32 {
229        Self::encode_index(HeapSpace::Handoff, index)
230    }
231
232    #[inline]
233    pub(crate) fn decode_index(index: u32) -> (HeapSpace, u32) {
234        let space = match (index & HEAP_SPACE_MASK_U32) >> HEAP_SPACE_SHIFT {
235            0 => HeapSpace::Young,
236            1 => HeapSpace::Yard,
237            2 => HeapSpace::Handoff,
238            3 => HeapSpace::Stable,
239            _ => unreachable!("invalid heap space bits"),
240        };
241        (space, index & HEAP_INDEX_MASK_U32)
242    }
243
244    #[inline]
245    pub fn is_stable_index(index: u32) -> bool {
246        matches!(Self::decode_index(index).0, HeapSpace::Stable)
247    }
248
249    #[inline]
250    pub fn is_yard_index_in_region(&self, index: u32, mark: u32) -> bool {
251        let (space, raw_index) = Self::decode_index(index);
252        matches!(space, HeapSpace::Yard)
253            && raw_index >= mark
254            && raw_index < self.yard_entries.len() as u32
255    }
256
257    #[inline]
258    pub fn is_handoff_index_in_region(&self, index: u32, mark: u32) -> bool {
259        let (space, raw_index) = Self::decode_index(index);
260        matches!(space, HeapSpace::Handoff)
261            && raw_index >= mark
262            && raw_index < self.handoff_entries.len() as u32
263    }
264
265    #[inline]
266    pub fn is_young_index_in_region(&self, index: u32, mark: u32) -> bool {
267        let (space, raw_index) = Self::decode_index(index);
268        matches!(space, HeapSpace::Young)
269            && raw_index >= mark
270            && raw_index < self.young_entries.len() as u32
271    }
272
273    #[inline]
274    pub fn young_len(&self) -> usize {
275        self.young_entries.len()
276    }
277
278    #[inline]
279    pub fn yard_len(&self) -> usize {
280        self.yard_entries.len()
281    }
282
283    #[inline]
284    pub fn handoff_len(&self) -> usize {
285        self.handoff_entries.len()
286    }
287
288    #[inline]
289    pub fn stable_len(&self) -> usize {
290        self.stable_entries.len()
291    }
292
293    #[inline]
294    pub fn usage(&self) -> ArenaUsage {
295        ArenaUsage {
296            young: self.young_entries.len(),
297            yard: self.yard_entries.len(),
298            handoff: self.handoff_entries.len(),
299            stable: self.stable_entries.len(),
300        }
301    }
302
303    #[inline]
304    pub fn peak_usage(&self) -> ArenaUsage {
305        self.peak_usage
306    }
307
308    #[inline]
309    pub(crate) fn note_peak_usage(&mut self) {
310        let usage = self.usage();
311        self.peak_usage.young = self.peak_usage.young.max(usage.young);
312        self.peak_usage.yard = self.peak_usage.yard.max(usage.yard);
313        self.peak_usage.handoff = self.peak_usage.handoff.max(usage.handoff);
314        self.peak_usage.stable = self.peak_usage.stable.max(usage.stable);
315    }
316
317    #[inline]
318    pub(crate) fn take_u32_scratch(slot: &mut Vec<u32>, len: usize) -> Vec<u32> {
319        let mut scratch = core::mem::take(slot);
320        scratch.clear();
321        scratch.resize(len, u32::MAX);
322        scratch
323    }
324
325    #[inline]
326    pub(crate) fn recycle_u32_scratch(slot: &mut Vec<u32>, mut scratch: Vec<u32>) {
327        scratch.clear();
328        *slot = scratch;
329    }
330
331    #[inline]
332    pub fn is_frame_local_index(
333        &self,
334        index: u32,
335        arena_mark: u32,
336        yard_mark: u32,
337        handoff_mark: u32,
338    ) -> bool {
339        self.is_young_index_in_region(index, arena_mark)
340            || self.is_yard_index_in_region(index, yard_mark)
341            || self.is_handoff_index_in_region(index, handoff_mark)
342    }
343
344    pub fn with_alloc_space<R>(
345        &mut self,
346        space: AllocSpace,
347        f: impl FnOnce(&mut Arena<T>) -> R,
348    ) -> R {
349        let prev = self.alloc_space;
350        self.alloc_space = space;
351        let out = f(self);
352        self.alloc_space = prev;
353        out
354    }
355
    // -- Typed push helpers ------------------------------------------------

    /// Push an i64 heap entry; returns its tagged index.
    pub fn push_i64(&mut self, val: i64) -> u32 {
        self.push(ArenaEntry::Int(val))
    }
    /// Push a string entry (copies `s` into a new Rc).
    pub fn push_string(&mut self, s: &str) -> u32 {
        self.push(ArenaEntry::String(Rc::from(s)))
    }
    /// Push a boxed value entry wrapping `val`.
    pub fn push_boxed(&mut self, val: NanValue) -> u32 {
        self.push(ArenaEntry::Boxed(val))
    }
    /// Push a record entry with the given type id and field values.
    pub fn push_record(&mut self, type_id: u32, fields: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Record { type_id, fields })
    }
    /// Push a sum-type variant entry.
    pub fn push_variant(&mut self, type_id: u32, variant_id: u16, fields: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Variant {
            type_id,
            variant_id,
            fields,
        })
    }
    /// Push a flat list entry over `items` (even when `items` is empty —
    /// callers wanting the immediate EMPTY_LIST encoding must handle that
    /// themselves, as `deep_import` does).
    pub fn push_list(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::List(ArenaList::Flat {
            items: Rc::new(items),
            start: 0,
        }))
    }
    /// Push a map entry.
    pub fn push_map(&mut self, map: T::Map) -> u32 {
        self.push(ArenaEntry::Map(map))
    }
    /// Push a tuple entry.
    pub fn push_tuple(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Tuple(items))
    }
    /// Push a mutable vector entry.
    pub fn push_vector(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Vector(items))
    }
    /// Push a function symbol; returns a symbol-table index (untagged).
    pub fn push_fn(&mut self, f: Rc<T::Fn>) -> u32 {
        self.push_symbol(ArenaSymbol::Fn(f))
    }
    /// Push a builtin symbol by name; returns a symbol-table index.
    pub fn push_builtin(&mut self, name: &str) -> u32 {
        self.push_symbol(ArenaSymbol::Builtin(Rc::from(name)))
    }
    /// Push a nullary-variant constructor symbol; returns a symbol-table index.
    pub fn push_nullary_variant_symbol(&mut self, ctor_id: u32) -> u32 {
        self.push_symbol(ArenaSymbol::NullaryVariant { ctor_id })
    }
401
402    // -- Typed getters -----------------------------------------------------
403
404    pub fn get_i64(&self, index: u32) -> i64 {
405        match self.get(index) {
406            ArenaEntry::Int(i) => *i,
407            _ => panic!("Arena: expected Int at {}", index),
408        }
409    }
410    pub fn get_string(&self, index: u32) -> &str {
411        match self.get(index) {
412            ArenaEntry::String(s) => s,
413            other => panic!("Arena: expected String at {} but found {:?}", index, other),
414        }
415    }
416    pub fn get_string_value(&self, value: NanValue) -> NanString<'_> {
417        if let Some(s) = value.small_string() {
418            s
419        } else {
420            NanString::Borrowed(self.get_string(value.arena_index()))
421        }
422    }
423    pub fn get_boxed(&self, index: u32) -> NanValue {
424        match self.get(index) {
425            ArenaEntry::Boxed(v) => *v,
426            _ => panic!("Arena: expected Boxed at {}", index),
427        }
428    }
429    pub fn get_record(&self, index: u32) -> (u32, &[NanValue]) {
430        match self.get(index) {
431            ArenaEntry::Record { type_id, fields } => (*type_id, fields),
432            _ => panic!("Arena: expected Record at {}", index),
433        }
434    }
435    pub fn get_variant(&self, index: u32) -> (u32, u16, &[NanValue]) {
436        match self.get(index) {
437            ArenaEntry::Variant {
438                type_id,
439                variant_id,
440                fields,
441            } => (*type_id, *variant_id, fields),
442            other => panic!("Arena: expected Variant at {} but found {:?}", index, other),
443        }
444    }
445    pub fn get_list(&self, index: u32) -> &ArenaList {
446        match self.get(index) {
447            ArenaEntry::List(items) => items,
448            _ => panic!("Arena: expected List at {}", index),
449        }
450    }
451    pub fn get_tuple(&self, index: u32) -> &[NanValue] {
452        match self.get(index) {
453            ArenaEntry::Tuple(items) => items,
454            _ => panic!("Arena: expected Tuple at {}", index),
455        }
456    }
457    pub fn get_vector(&self, index: u32) -> &[NanValue] {
458        match self.get(index) {
459            ArenaEntry::Vector(items) => items,
460            _ => panic!("Arena: expected Vector at {}", index),
461        }
462    }
463    pub fn get_vector_mut(&mut self, index: u32) -> &mut Vec<NanValue> {
464        match self.get_mut(index) {
465            ArenaEntry::Vector(items) => items,
466            _ => panic!("Arena: expected Vector at {}", index),
467        }
468    }
469    pub fn vector_ref_value(&self, value: NanValue) -> &[NanValue] {
470        if value.is_empty_vector_immediate() {
471            return &[];
472        }
473        self.get_vector(value.arena_index())
474    }
475    pub fn clone_vector_value(&self, value: NanValue) -> Vec<NanValue> {
476        if value.is_empty_vector_immediate() {
477            Vec::new()
478        } else {
479            self.get_vector(value.arena_index()).to_vec()
480        }
481    }
482    /// Take ownership of a vector, replacing the arena slot with an empty vec.
483    pub fn take_vector_value(&mut self, value: NanValue) -> Vec<NanValue> {
484        if value.is_empty_vector_immediate() {
485            Vec::new()
486        } else {
487            let index = value.arena_index();
488            std::mem::take(self.get_vector_mut(index))
489        }
490    }
491    pub fn get_map(&self, index: u32) -> &T::Map {
492        match self.get(index) {
493            ArenaEntry::Map(map) => map,
494            _ => panic!("Arena: expected Map at {}", index),
495        }
496    }
497    pub fn get_map_mut(&mut self, index: u32) -> &mut T::Map {
498        match self.get_mut(index) {
499            ArenaEntry::Map(map) => map,
500            _ => panic!("Arena: expected Map at {}", index),
501        }
502    }
    /// Resolve a map NanValue to a `&T::Map`. The empty-map immediate resolves
    /// to a leaked `'static` empty map singleton; heap maps are borrowed from
    /// the arena.
    ///
    /// NOTE(review): a `static` declared inside a generic fn is shared across
    /// ALL monomorphizations of `T`. If this crate is ever instantiated with
    /// two `ArenaTypes` whose `Map` types differ, the pointer leaked for one
    /// `T::Map` would be reinterpreted as the other — undefined behavior.
    /// Confirm only one `ArenaTypes` instantiation exists per process, or key
    /// the cache by type (e.g. `TypeId`) / move the singleton into the trait.
    pub fn map_ref_value(&self, map: NanValue) -> &T::Map {
        if map.is_empty_map_immediate() {
            // Use a leaked singleton for the empty map reference.
            // This avoids thread_local! which is not available in no_std.
            use core::sync::atomic::{AtomicPtr, Ordering as AtomicOrdering};
            static EMPTY_MAP_PTR: AtomicPtr<()> = AtomicPtr::new(core::ptr::null_mut());

            let ptr = EMPTY_MAP_PTR.load(AtomicOrdering::Acquire);
            if !ptr.is_null() {
                // SAFETY: ptr was allocated via Box::leak and is valid for 'static
                return unsafe { &*(ptr as *const T::Map) };
            }
            // First caller (or a racing caller): leak a fresh empty map.
            let boxed = alloc::boxed::Box::new(T::Map::new());
            let leaked: &'static T::Map = alloc::boxed::Box::leak(boxed);
            let new_ptr = leaked as *const T::Map as *mut ();
            // If another thread raced us, that's fine — we just leak one extra allocation
            EMPTY_MAP_PTR.store(new_ptr, AtomicOrdering::Release);
            leaked
        } else {
            self.get_map(map.arena_index())
        }
    }
525    pub fn clone_map_value(&self, map: NanValue) -> T::Map {
526        if map.is_empty_map_immediate() {
527            T::Map::new()
528        } else {
529            self.get_map(map.arena_index()).clone()
530        }
531    }
532    /// Take ownership of a map value, replacing it with an empty map in the arena.
533    /// Use when the caller is the sole owner (reuse analysis says `owned = true`).
534    /// Avoids the O(n) clone — the original slot becomes empty.
535    pub fn take_map_value(&mut self, map: NanValue) -> T::Map {
536        if map.is_empty_map_immediate() {
537            T::Map::new()
538        } else {
539            let index = map.arena_index();
540            std::mem::replace(self.get_map_mut(index), T::Map::new())
541        }
542    }
    /// Borrow the function at symbol-table `index`. Panics on kind mismatch
    /// (and on an out-of-range index, via slice indexing).
    pub fn get_fn(&self, index: u32) -> &T::Fn {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Fn(f) => f,
            _ => panic!("Arena: expected Fn symbol at {}", index),
        }
    }
    /// Borrow the function Rc at symbol-table `index` (for cheap cloning).
    pub fn get_fn_rc(&self, index: u32) -> &Rc<T::Fn> {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Fn(f) => f,
            _ => panic!("Arena: expected Fn symbol at {}", index),
        }
    }
    /// Borrow the builtin name at symbol-table `index`.
    pub fn get_builtin(&self, index: u32) -> &str {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Builtin(s) => s,
            _ => panic!("Arena: expected Builtin symbol at {}", index),
        }
    }
    /// Borrow the namespace at symbol-table `index` as (name, members).
    pub fn get_namespace(&self, index: u32) -> (&str, &[(Rc<str>, NanValue)]) {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Namespace { name, members } => (name, members),
            _ => panic!("Arena: expected Namespace symbol at {}", index),
        }
    }
    /// Read the constructor id of the nullary variant at symbol-table `index`.
    pub fn get_nullary_variant_ctor(&self, index: u32) -> u32 {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::NullaryVariant { ctor_id } => *ctor_id,
            _ => panic!("Arena: expected NullaryVariant symbol at {}", index),
        }
    }
573
574    // -- Type registry -----------------------------------------------------
575
576    pub fn register_record_type(&mut self, name: &str, field_names: Vec<String>) -> u32 {
577        let id = self.type_names.len() as u32;
578        self.type_names.push(String::from(name));
579        self.type_field_names.push(field_names);
580        self.type_variant_names.push(Vec::new());
581        self.type_variant_ctor_ids.push(Vec::new());
582        id
583    }
584
585    pub fn register_sum_type(&mut self, name: &str, variant_names: Vec<String>) -> u32 {
586        let id = self.type_names.len() as u32;
587        self.type_names.push(String::from(name));
588        self.type_field_names.push(Vec::new());
589        let ctor_ids: Vec<u32> = (0..variant_names.len())
590            .map(|variant_idx| {
591                let ctor_id = self.ctor_to_type_variant.len() as u32;
592                self.ctor_to_type_variant.push((id, variant_idx as u16));
593                ctor_id
594            })
595            .collect();
596        self.type_variant_names.push(variant_names);
597        self.type_variant_ctor_ids.push(ctor_ids);
598        id
599    }
600
601    pub fn register_variant_name(&mut self, type_id: u32, variant_name: String) -> u16 {
602        let variants = &mut self.type_variant_names[type_id as usize];
603        let variant_id = variants.len() as u16;
604        variants.push(variant_name);
605
606        let ctor_id = self.ctor_to_type_variant.len() as u32;
607        self.ctor_to_type_variant.push((type_id, variant_id));
608        self.type_variant_ctor_ids[type_id as usize].push(ctor_id);
609
610        variant_id
611    }
612
613    pub fn get_type_name(&self, type_id: u32) -> &str {
614        &self.type_names[type_id as usize]
615    }
616    pub fn type_count(&self) -> u32 {
617        self.type_names.len() as u32
618    }
619    pub fn get_field_names(&self, type_id: u32) -> &[String] {
620        &self.type_field_names[type_id as usize]
621    }
622    pub fn get_variant_name(&self, type_id: u32, variant_id: u16) -> &str {
623        &self.type_variant_names[type_id as usize][variant_id as usize]
624    }
625    pub fn find_type_id(&self, name: &str) -> Option<u32> {
626        self.type_names
627            .iter()
628            .position(|n| n == name)
629            .map(|i| i as u32)
630    }
631    pub fn find_variant_id(&self, type_id: u32, variant_name: &str) -> Option<u16> {
632        self.type_variant_names
633            .get(type_id as usize)?
634            .iter()
635            .position(|n| n == variant_name)
636            .map(|i| i as u16)
637    }
638
639    pub fn find_ctor_id(&self, type_id: u32, variant_id: u16) -> Option<u32> {
640        self.type_variant_ctor_ids
641            .get(type_id as usize)?
642            .get(variant_id as usize)
643            .copied()
644    }
645
646    pub fn get_ctor_parts(&self, ctor_id: u32) -> (u32, u16) {
647        self.ctor_to_type_variant
648            .get(ctor_id as usize)
649            .copied()
650            .unwrap_or_else(|| panic!("Arena: expected ctor id {} to be registered", ctor_id))
651    }
652
653    pub fn len(&self) -> usize {
654        self.young_entries.len()
655            + self.yard_entries.len()
656            + self.handoff_entries.len()
657            + self.stable_entries.len()
658    }
659    pub fn is_empty(&self) -> bool {
660        self.young_entries.is_empty()
661            && self.yard_entries.is_empty()
662            && self.handoff_entries.is_empty()
663            && self.stable_entries.is_empty()
664    }
665}
666
667impl<T: ArenaTypes> Default for Arena<T> {
668    fn default() -> Self {
669        Self::new()
670    }
671}