// aver_memory/arena.rs

1use super::*;
2
3impl<T: ArenaTypes> Arena<T> {
4    pub fn new() -> Self {
5        Arena {
6            young_entries: Vec::with_capacity(256),
7            yard_entries: Vec::with_capacity(64),
8            handoff_entries: Vec::with_capacity(64),
9            stable_entries: Vec::with_capacity(64),
10            scratch_young: Vec::new(),
11            scratch_yard: Vec::new(),
12            scratch_handoff: Vec::new(),
13            scratch_stable: Vec::new(),
14            peak_usage: ArenaUsage::default(),
15            alloc_space: AllocSpace::Young,
16            type_names: Vec::new(),
17            type_field_names: Vec::new(),
18            type_variant_names: Vec::new(),
19            type_variant_ctor_ids: Vec::new(),
20            ctor_to_type_variant: Vec::new(),
21            symbol_entries: Vec::new(),
22            type_aliases: Vec::new(),
23        }
24    }
25
26    /// Create a fresh Arena with only the static context (symbols, type metadata,
27    /// stable constants) from this Arena. Dynamic runtime entries are empty.
28    /// Used for independent product threads: each gets a clean Arena with just the
29    /// compile-time context needed to execute functions and builtins.
30    pub fn clone_static(&self) -> Self {
31        Arena {
32            young_entries: Vec::with_capacity(64),
33            yard_entries: Vec::new(),
34            handoff_entries: Vec::new(),
35            stable_entries: self.stable_entries.clone(),
36            scratch_young: Vec::new(),
37            scratch_yard: Vec::new(),
38            scratch_handoff: Vec::new(),
39            scratch_stable: Vec::new(),
40            peak_usage: ArenaUsage::default(),
41            alloc_space: AllocSpace::Young,
42            type_names: self.type_names.clone(),
43            type_field_names: self.type_field_names.clone(),
44            type_variant_names: self.type_variant_names.clone(),
45            type_variant_ctor_ids: self.type_variant_ctor_ids.clone(),
46            ctor_to_type_variant: self.ctor_to_type_variant.clone(),
47            symbol_entries: self.symbol_entries.clone(),
48            type_aliases: self.type_aliases.clone(),
49        }
50    }
51
    /// Deep-import a NanValue from `source` arena into `self`.
    /// Inline values (int, float, bool, unit, none, empty containers) are returned as-is.
    /// Heap-referenced values are recursively copied into `self` with new indices.
    ///
    /// NOTE(review): recursion depth tracks the value's nesting depth, so very
    /// deeply nested data could overflow the stack — confirm inputs are bounded.
    pub fn deep_import(&mut self, value: NanValue, source: &Arena<T>) -> NanValue {
        // Not NaN-boxed = plain float, return as-is
        if !value.is_nan_boxed() {
            return value;
        }
        // Check if it has a heap index — if not, it's inline
        let heap_idx = match value.heap_index() {
            Some(idx) => idx,
            None => return value, // inline int, bool, unit, none, empty list/map/etc
        };

        // Clone the source entry up front so we can allocate into `self`
        // while recursing without holding a borrow of `source`'s slot.
        let entry = source.get(heap_idx).clone();
        match entry {
            ArenaEntry::Int(i) => NanValue::new_int(i, self),
            ArenaEntry::String(s) => {
                let idx = self.push(ArenaEntry::String(s));
                NanValue::new_string(idx)
            }
            ArenaEntry::Tuple(items) => {
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push_tuple(imported);
                NanValue::new_tuple(idx)
            }
            ArenaEntry::List(_) => {
                // Flatten list and re-import as a fresh flat list
                let flat = source.list_to_vec_value(value);
                let imported: Vec<NanValue> =
                    flat.iter().map(|v| self.deep_import(*v, source)).collect();
                if imported.is_empty() {
                    NanValue::EMPTY_LIST
                } else {
                    let rc_items = Rc::new(imported);
                    let idx = self.push(ArenaEntry::List(ArenaList::Flat {
                        items: rc_items,
                        start: 0,
                    }));
                    NanValue::new_list(idx)
                }
            }
            ArenaEntry::Map(map) => {
                // Import each key/value pair, re-inserting under the pair's
                // original hash (the key's content — hence its hash — is
                // unchanged by the import).
                let mut new_map = T::Map::new();
                for (hash, (k, v)) in map.iter() {
                    let ik = self.deep_import(*k, source);
                    let iv = self.deep_import(*v, source);
                    new_map = new_map.insert(*hash, (ik, iv));
                }
                let idx = self.push(ArenaEntry::Map(new_map));
                NanValue::new_map(idx)
            }
            ArenaEntry::Vector(items) => {
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push(ArenaEntry::Vector(imported));
                NanValue::new_vector(idx)
            }
            ArenaEntry::Record { type_id, fields } => {
                // type_id is copied verbatim — assumes both arenas share the
                // same type registry (see clone_static) — TODO confirm.
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Record {
                    type_id,
                    fields: imported,
                });
                NanValue::new_record(idx)
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                fields,
            } => {
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields: imported,
                });
                NanValue::new_variant(idx)
            }
            ArenaEntry::Boxed(inner) => {
                // Re-encode with the value's original tag but the new index.
                let imported = self.deep_import(inner, source);
                let idx = self.push(ArenaEntry::Boxed(imported));
                NanValue::encode(value.tag(), ARENA_REF_BIT | (idx as u64))
            }
            // Fn/Builtin/Namespace — should not appear in independent product results
            ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => value,
        }
    }
147
148    #[inline]
149    pub fn push(&mut self, entry: ArenaEntry<T>) -> u32 {
150        match &entry {
151            ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => {}
152            _ => {
153                return match self.alloc_space {
154                    AllocSpace::Young => {
155                        let idx = self.young_entries.len() as u32;
156                        self.young_entries.push(entry);
157                        self.note_peak_usage();
158                        Self::encode_index(HeapSpace::Young, idx)
159                    }
160                    AllocSpace::Yard => {
161                        let idx = self.yard_entries.len() as u32;
162                        self.yard_entries.push(entry);
163                        self.note_peak_usage();
164                        Self::encode_index(HeapSpace::Yard, idx)
165                    }
166                    AllocSpace::Handoff => {
167                        let idx = self.handoff_entries.len() as u32;
168                        self.handoff_entries.push(entry);
169                        self.note_peak_usage();
170                        Self::encode_index(HeapSpace::Handoff, idx)
171                    }
172                };
173            }
174        }
175        match entry {
176            ArenaEntry::Fn(f) => self.push_symbol(ArenaSymbol::Fn(f)),
177            ArenaEntry::Builtin(name) => self.push_symbol(ArenaSymbol::Builtin(name)),
178            ArenaEntry::Namespace { name, members } => {
179                self.push_symbol(ArenaSymbol::Namespace { name, members })
180            }
181            _ => unreachable!("non-symbol entry already returned above"),
182        }
183    }
184
    /// Append `symbol` to the symbol table and return its index.
    /// Symbol indices are a plain, untagged namespace separate from the
    /// space-tagged heap indices produced by `push`.
    #[inline]
    pub fn push_symbol(&mut self, symbol: ArenaSymbol<T>) -> u32 {
        let idx = self.symbol_entries.len() as u32;
        self.symbol_entries.push(symbol);
        idx
    }

    /// Look up a heap entry by space-tagged index.
    /// Panics (via slice indexing) if the raw index is out of bounds for
    /// the decoded space.
    #[inline]
    pub fn get(&self, index: u32) -> &ArenaEntry<T> {
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => &self.young_entries[raw_index as usize],
            HeapSpace::Yard => &self.yard_entries[raw_index as usize],
            HeapSpace::Handoff => &self.handoff_entries[raw_index as usize],
            HeapSpace::Stable => &self.stable_entries[raw_index as usize],
        }
    }

    /// Mutable counterpart of `get`; same panic behavior on bad indices.
    #[inline]
    pub fn get_mut(&mut self, index: u32) -> &mut ArenaEntry<T> {
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => &mut self.young_entries[raw_index as usize],
            HeapSpace::Yard => &mut self.yard_entries[raw_index as usize],
            HeapSpace::Handoff => &mut self.handoff_entries[raw_index as usize],
            HeapSpace::Stable => &mut self.stable_entries[raw_index as usize],
        }
    }
213
    /// Pack a heap-space tag into the high bits of a raw index.
    /// NOTE(review): assumes `index` never reaches the tag bits — an index
    /// above `HEAP_INDEX_MASK_U32` would silently corrupt the space tag.
    /// Confirm the per-space capacity limits enforce this.
    #[inline]
    pub(crate) fn encode_index(space: HeapSpace, index: u32) -> u32 {
        ((space as u32) << HEAP_SPACE_SHIFT) | index
    }

    /// Convenience: tag a raw index as Yard.
    #[inline]
    pub(crate) fn encode_yard_index(index: u32) -> u32 {
        Self::encode_index(HeapSpace::Yard, index)
    }

    /// Convenience: tag a raw index as Stable.
    #[inline]
    pub(crate) fn encode_stable_index(index: u32) -> u32 {
        Self::encode_index(HeapSpace::Stable, index)
    }

    /// Convenience: tag a raw index as Handoff.
    #[inline]
    pub(crate) fn encode_handoff_index(index: u32) -> u32 {
        Self::encode_index(HeapSpace::Handoff, index)
    }

    /// Split a tagged index into `(space, raw_index)` — inverse of
    /// `encode_index`.
    #[inline]
    pub(crate) fn decode_index(index: u32) -> (HeapSpace, u32) {
        let space = match (index & HEAP_SPACE_MASK_U32) >> HEAP_SPACE_SHIFT {
            0 => HeapSpace::Young,
            1 => HeapSpace::Yard,
            2 => HeapSpace::Handoff,
            3 => HeapSpace::Stable,
            // Presumably a 2-bit tag field, making 0..=3 exhaustive —
            // TODO confirm against HEAP_SPACE_MASK_U32's definition.
            _ => unreachable!("invalid heap space bits"),
        };
        (space, index & HEAP_INDEX_MASK_U32)
    }
245
246    #[inline]
247    pub fn is_stable_index(index: u32) -> bool {
248        matches!(Self::decode_index(index).0, HeapSpace::Stable)
249    }
250
251    #[inline]
252    pub fn is_yard_index_in_region(&self, index: u32, mark: u32) -> bool {
253        let (space, raw_index) = Self::decode_index(index);
254        matches!(space, HeapSpace::Yard)
255            && raw_index >= mark
256            && raw_index < self.yard_entries.len() as u32
257    }
258
259    #[inline]
260    pub fn is_handoff_index_in_region(&self, index: u32, mark: u32) -> bool {
261        let (space, raw_index) = Self::decode_index(index);
262        matches!(space, HeapSpace::Handoff)
263            && raw_index >= mark
264            && raw_index < self.handoff_entries.len() as u32
265    }
266
267    #[inline]
268    pub fn is_young_index_in_region(&self, index: u32, mark: u32) -> bool {
269        let (space, raw_index) = Self::decode_index(index);
270        matches!(space, HeapSpace::Young)
271            && raw_index >= mark
272            && raw_index < self.young_entries.len() as u32
273    }
274
    /// Number of entries currently in the young space.
    #[inline]
    pub fn young_len(&self) -> usize {
        self.young_entries.len()
    }

    /// Number of entries currently in the yard space.
    #[inline]
    pub fn yard_len(&self) -> usize {
        self.yard_entries.len()
    }

    /// Number of entries currently in the handoff space.
    #[inline]
    pub fn handoff_len(&self) -> usize {
        self.handoff_entries.len()
    }

    /// Number of entries currently in the stable space.
    #[inline]
    pub fn stable_len(&self) -> usize {
        self.stable_entries.len()
    }

    /// Snapshot of the current per-space entry counts.
    #[inline]
    pub fn usage(&self) -> ArenaUsage {
        ArenaUsage {
            young: self.young_entries.len(),
            yard: self.yard_entries.len(),
            handoff: self.handoff_entries.len(),
            stable: self.stable_entries.len(),
        }
    }

    /// Per-space high-water marks recorded by `note_peak_usage`.
    #[inline]
    pub fn peak_usage(&self) -> ArenaUsage {
        self.peak_usage
    }

    /// Fold the current usage into the per-space high-water marks.
    /// Called after every heap push (see `push`).
    #[inline]
    pub(crate) fn note_peak_usage(&mut self) {
        let usage = self.usage();
        self.peak_usage.young = self.peak_usage.young.max(usage.young);
        self.peak_usage.yard = self.peak_usage.yard.max(usage.yard);
        self.peak_usage.handoff = self.peak_usage.handoff.max(usage.handoff);
        self.peak_usage.stable = self.peak_usage.stable.max(usage.stable);
    }
318
    /// Borrow a reusable `u32` scratch buffer out of `slot`, resized to
    /// `len` elements all set to `u32::MAX` (the "unset" sentinel).
    /// Pair with `recycle_u32_scratch` to return the allocation.
    #[inline]
    pub(crate) fn take_u32_scratch(slot: &mut Vec<u32>, len: usize) -> Vec<u32> {
        let mut scratch = core::mem::take(slot);
        // clear() first so resize() overwrites *every* element with the
        // sentinel instead of keeping stale values from the previous use.
        scratch.clear();
        scratch.resize(len, u32::MAX);
        scratch
    }

    /// Return a buffer taken via `take_u32_scratch`, keeping its allocation
    /// for the next caller.
    #[inline]
    pub(crate) fn recycle_u32_scratch(slot: &mut Vec<u32>, mut scratch: Vec<u32>) {
        scratch.clear();
        *slot = scratch;
    }

    /// True when `index` lies at or above the given mark in any of the
    /// three dynamic spaces — presumably "allocated since the marks were
    /// taken for the current frame"; confirm against callers.
    #[inline]
    pub fn is_frame_local_index(
        &self,
        index: u32,
        arena_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
    ) -> bool {
        self.is_young_index_in_region(index, arena_mark)
            || self.is_yard_index_in_region(index, yard_mark)
            || self.is_handoff_index_in_region(index, handoff_mark)
    }
345
    /// Run `f` with the allocation space temporarily switched to `space`,
    /// restoring the previous space afterwards.
    ///
    /// NOTE(review): the previous space is not restored if `f` unwinds
    /// (there is no drop guard) — confirm panics cannot escape `f`, or
    /// that a poisoned alloc_space is acceptable after a panic.
    pub fn with_alloc_space<R>(
        &mut self,
        space: AllocSpace,
        f: impl FnOnce(&mut Arena<T>) -> R,
    ) -> R {
        let prev = self.alloc_space;
        self.alloc_space = space;
        let out = f(self);
        self.alloc_space = prev;
        out
    }
357
358    /// Push an entry, inheriting the allocation space from a source value.
359    /// If the source lives in yard or handoff, the result is placed there too,
360    /// avoiding a pointless young→yard/handoff promotion later.
361    pub fn push_inheriting_source_space(&mut self, entry: ArenaEntry<T>, source: NanValue) -> u32 {
362        if let Some(index) = source.heap_index() {
363            let (space, _) = Self::decode_index(index);
364            let target = match space {
365                HeapSpace::Yard => Some(AllocSpace::Yard),
366                HeapSpace::Handoff => Some(AllocSpace::Handoff),
367                _ => None,
368            };
369            if let Some(target) = target {
370                let prev = self.alloc_space;
371                self.alloc_space = target;
372                let idx = self.push(entry);
373                self.alloc_space = prev;
374                return idx;
375            }
376        }
377        self.push(entry)
378    }
379
380    // -- Typed push helpers ------------------------------------------------
381
    /// Push an Int entry; returns its encoded heap index.
    pub fn push_i64(&mut self, val: i64) -> u32 {
        self.push(ArenaEntry::Int(val))
    }
    /// Push a String entry (copies `s` into an `Rc<str>`).
    pub fn push_string(&mut self, s: &str) -> u32 {
        self.push(ArenaEntry::String(Rc::from(s)))
    }
    /// Push a Boxed wrapper around `val`.
    pub fn push_boxed(&mut self, val: NanValue) -> u32 {
        self.push(ArenaEntry::Boxed(val))
    }
    /// Push a Record with its field values.
    pub fn push_record(&mut self, type_id: u32, fields: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Record { type_id, fields })
    }
    /// Push a Variant with its payload fields.
    pub fn push_variant(&mut self, type_id: u32, variant_id: u16, fields: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Variant {
            type_id,
            variant_id,
            fields,
        })
    }
    /// Push a flat List starting at offset 0.
    pub fn push_list(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::List(ArenaList::Flat {
            items: Rc::new(items),
            start: 0,
        }))
    }
    /// Push a Map entry.
    pub fn push_map(&mut self, map: T::Map) -> u32 {
        self.push(ArenaEntry::Map(map))
    }
    /// Push a Tuple entry.
    pub fn push_tuple(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Tuple(items))
    }
    /// Push a Vector entry.
    pub fn push_vector(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Vector(items))
    }
    /// Push a function symbol; returns an untagged symbol-table index.
    pub fn push_fn(&mut self, f: Rc<T::Fn>) -> u32 {
        self.push_symbol(ArenaSymbol::Fn(f))
    }
    /// Push a builtin symbol by name; returns an untagged symbol-table index.
    pub fn push_builtin(&mut self, name: &str) -> u32 {
        self.push_symbol(ArenaSymbol::Builtin(Rc::from(name)))
    }
    /// Push a nullary-variant symbol; returns an untagged symbol-table index.
    pub fn push_nullary_variant_symbol(&mut self, ctor_id: u32) -> u32 {
        self.push_symbol(ArenaSymbol::NullaryVariant { ctor_id })
    }
425
426    // -- Typed getters -----------------------------------------------------
427
428    pub fn get_i64(&self, index: u32) -> i64 {
429        match self.get(index) {
430            ArenaEntry::Int(i) => *i,
431            _ => panic!("Arena: expected Int at {}", index),
432        }
433    }
434    pub fn get_string(&self, index: u32) -> &str {
435        match self.get(index) {
436            ArenaEntry::String(s) => s,
437            other => panic!("Arena: expected String at {} but found {:?}", index, other),
438        }
439    }
440    pub fn get_string_value(&self, value: NanValue) -> NanString<'_> {
441        if let Some(s) = value.small_string() {
442            s
443        } else {
444            NanString::Borrowed(self.get_string(value.arena_index()))
445        }
446    }
447    pub fn get_boxed(&self, index: u32) -> NanValue {
448        match self.get(index) {
449            ArenaEntry::Boxed(v) => *v,
450            _ => panic!("Arena: expected Boxed at {}", index),
451        }
452    }
453    pub fn get_record(&self, index: u32) -> (u32, &[NanValue]) {
454        match self.get(index) {
455            ArenaEntry::Record { type_id, fields } => (*type_id, fields),
456            _ => panic!("Arena: expected Record at {}", index),
457        }
458    }
459    pub fn get_variant(&self, index: u32) -> (u32, u16, &[NanValue]) {
460        match self.get(index) {
461            ArenaEntry::Variant {
462                type_id,
463                variant_id,
464                fields,
465            } => (*type_id, *variant_id, fields),
466            other => panic!("Arena: expected Variant at {} but found {:?}", index, other),
467        }
468    }
469    pub fn get_list(&self, index: u32) -> &ArenaList {
470        match self.get(index) {
471            ArenaEntry::List(items) => items,
472            _ => panic!("Arena: expected List at {}", index),
473        }
474    }
475    pub fn get_tuple(&self, index: u32) -> &[NanValue] {
476        match self.get(index) {
477            ArenaEntry::Tuple(items) => items,
478            _ => panic!("Arena: expected Tuple at {}", index),
479        }
480    }
481    pub fn get_vector(&self, index: u32) -> &[NanValue] {
482        match self.get(index) {
483            ArenaEntry::Vector(items) => items,
484            _ => panic!("Arena: expected Vector at {}", index),
485        }
486    }
487    pub fn get_vector_mut(&mut self, index: u32) -> &mut Vec<NanValue> {
488        match self.get_mut(index) {
489            ArenaEntry::Vector(items) => items,
490            _ => panic!("Arena: expected Vector at {}", index),
491        }
492    }
493    pub fn vector_ref_value(&self, value: NanValue) -> &[NanValue] {
494        if value.is_empty_vector_immediate() {
495            return &[];
496        }
497        self.get_vector(value.arena_index())
498    }
499    pub fn clone_vector_value(&self, value: NanValue) -> Vec<NanValue> {
500        if value.is_empty_vector_immediate() {
501            Vec::new()
502        } else {
503            self.get_vector(value.arena_index()).to_vec()
504        }
505    }
506    /// Take ownership of a vector, replacing the arena slot with an empty vec.
507    pub fn take_vector_value(&mut self, value: NanValue) -> Vec<NanValue> {
508        if value.is_empty_vector_immediate() {
509            Vec::new()
510        } else {
511            let index = value.arena_index();
512            std::mem::take(self.get_vector_mut(index))
513        }
514    }
515    pub fn get_map(&self, index: u32) -> &T::Map {
516        match self.get(index) {
517            ArenaEntry::Map(map) => map,
518            _ => panic!("Arena: expected Map at {}", index),
519        }
520    }
521    pub fn get_map_mut(&mut self, index: u32) -> &mut T::Map {
522        match self.get_mut(index) {
523            ArenaEntry::Map(map) => map,
524            _ => panic!("Arena: expected Map at {}", index),
525        }
526    }
    /// Borrow the map behind `map`, resolving the empty-map immediate to a
    /// shared, leaked empty `T::Map` singleton.
    pub fn map_ref_value(&self, map: NanValue) -> &T::Map {
        if map.is_empty_map_immediate() {
            // Use a leaked singleton for the empty map reference.
            // This avoids thread_local! which is not available in no_std.
            //
            // NOTE(review): a `static` declared inside a generic fn is ONE
            // item shared by ALL monomorphizations. If this crate ever
            // instantiates Arena with two different `T::Map` types, the
            // pointer leaked for one type is reinterpreted as the other
            // (undefined behavior). Confirm only a single Map type exists,
            // or key this cache per concrete type.
            use core::sync::atomic::{AtomicPtr, Ordering as AtomicOrdering};
            static EMPTY_MAP_PTR: AtomicPtr<()> = AtomicPtr::new(core::ptr::null_mut());

            let ptr = EMPTY_MAP_PTR.load(AtomicOrdering::Acquire);
            if !ptr.is_null() {
                // SAFETY: ptr was allocated via Box::leak and is valid for 'static
                return unsafe { &*(ptr as *const T::Map) };
            }
            let boxed = alloc::boxed::Box::new(T::Map::new());
            let leaked: &'static T::Map = alloc::boxed::Box::leak(boxed);
            let new_ptr = leaked as *const T::Map as *mut ();
            // If another thread raced us, that's fine — we just leak one extra allocation
            EMPTY_MAP_PTR.store(new_ptr, AtomicOrdering::Release);
            leaked
        } else {
            self.get_map(map.arena_index())
        }
    }
549    pub fn clone_map_value(&self, map: NanValue) -> T::Map {
550        if map.is_empty_map_immediate() {
551            T::Map::new()
552        } else {
553            self.get_map(map.arena_index()).clone()
554        }
555    }
556    /// Take ownership of a map value, replacing it with an empty map in the arena.
557    /// Use when the caller is the sole owner (reuse analysis says `owned = true`).
558    /// Avoids the O(n) clone — the original slot becomes empty.
559    pub fn take_map_value(&mut self, map: NanValue) -> T::Map {
560        if map.is_empty_map_immediate() {
561            T::Map::new()
562        } else {
563            let index = map.arena_index();
564            std::mem::replace(self.get_map_mut(index), T::Map::new())
565        }
566    }
    /// Fetch the function at symbol-table `index`; panics otherwise.
    /// Symbol indices are the plain, untagged kind produced by `push_symbol`.
    pub fn get_fn(&self, index: u32) -> &T::Fn {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Fn(f) => f,
            _ => panic!("Arena: expected Fn symbol at {}", index),
        }
    }
    /// Like `get_fn`, but returns the reference-counted handle itself.
    pub fn get_fn_rc(&self, index: u32) -> &Rc<T::Fn> {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Fn(f) => f,
            _ => panic!("Arena: expected Fn symbol at {}", index),
        }
    }
    /// Fetch the builtin name at symbol-table `index`; panics otherwise.
    pub fn get_builtin(&self, index: u32) -> &str {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Builtin(s) => s,
            _ => panic!("Arena: expected Builtin symbol at {}", index),
        }
    }
    /// Fetch `(name, members)` of the namespace at symbol-table `index`.
    pub fn get_namespace(&self, index: u32) -> (&str, &[(Rc<str>, NanValue)]) {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Namespace { name, members } => (name, members),
            _ => panic!("Arena: expected Namespace symbol at {}", index),
        }
    }
    /// Fetch the ctor id of the nullary-variant symbol at `index`.
    pub fn get_nullary_variant_ctor(&self, index: u32) -> u32 {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::NullaryVariant { ctor_id } => *ctor_id,
            _ => panic!("Arena: expected NullaryVariant symbol at {}", index),
        }
    }
597
598    // -- Type registry -----------------------------------------------------
599
    /// Register a record type, returning its new type id.
    /// Pushes to all four parallel per-type tables (with empty variant
    /// entries) so the tables stay the same length.
    pub fn register_record_type(&mut self, name: &str, field_names: Vec<String>) -> u32 {
        let id = self.type_names.len() as u32;
        self.type_names.push(String::from(name));
        self.type_field_names.push(field_names);
        self.type_variant_names.push(Vec::new());
        self.type_variant_ctor_ids.push(Vec::new());
        id
    }

    /// Register a sum type with its variants, returning its new type id.
    /// Allocates one global ctor id per variant and records the reverse
    /// ctor -> (type, variant) mapping; field names stay empty for sum types.
    pub fn register_sum_type(&mut self, name: &str, variant_names: Vec<String>) -> u32 {
        let id = self.type_names.len() as u32;
        self.type_names.push(String::from(name));
        self.type_field_names.push(Vec::new());
        let ctor_ids: Vec<u32> = (0..variant_names.len())
            .map(|variant_idx| {
                let ctor_id = self.ctor_to_type_variant.len() as u32;
                self.ctor_to_type_variant.push((id, variant_idx as u16));
                ctor_id
            })
            .collect();
        self.type_variant_names.push(variant_names);
        self.type_variant_ctor_ids.push(ctor_ids);
        id
    }

    /// Append one variant to an already-registered type, returning its new
    /// variant id; also allocates a fresh global ctor id for it.
    /// Panics (via indexing) if `type_id` was never registered.
    pub fn register_variant_name(&mut self, type_id: u32, variant_name: String) -> u16 {
        let variants = &mut self.type_variant_names[type_id as usize];
        let variant_id = variants.len() as u16;
        variants.push(variant_name);

        let ctor_id = self.ctor_to_type_variant.len() as u32;
        self.ctor_to_type_variant.push((type_id, variant_id));
        self.type_variant_ctor_ids[type_id as usize].push(ctor_id);

        variant_id
    }
636
    /// Name of a registered type; panics if `type_id` is out of range.
    pub fn get_type_name(&self, type_id: u32) -> &str {
        &self.type_names[type_id as usize]
    }
    /// Number of registered types (record and sum combined).
    pub fn type_count(&self) -> u32 {
        self.type_names.len() as u32
    }
    /// Field names of a record type (empty for sum types — see
    /// `register_sum_type`).
    pub fn get_field_names(&self, type_id: u32) -> &[String] {
        &self.type_field_names[type_id as usize]
    }
    /// Name of one variant of a sum type; panics on out-of-range ids.
    pub fn get_variant_name(&self, type_id: u32, variant_id: u16) -> &str {
        &self.type_variant_names[type_id as usize][variant_id as usize]
    }
    /// Record an alias name resolving to an existing type id.
    pub fn register_type_alias(&mut self, alias: &str, type_id: u32) {
        self.type_aliases.push((alias.to_string(), type_id));
    }

    /// Resolve a type name to its id: canonical names are checked first,
    /// then registered aliases. Linear scans — presumably the type tables
    /// are small; confirm if type counts grow.
    pub fn find_type_id(&self, name: &str) -> Option<u32> {
        self.type_names
            .iter()
            .position(|n| n == name)
            .map(|i| i as u32)
            .or_else(|| {
                self.type_aliases
                    .iter()
                    .find(|(alias, _)| alias == name)
                    .map(|(_, id)| *id)
            })
    }
    /// Resolve a variant name within a type; `None` if either is unknown.
    pub fn find_variant_id(&self, type_id: u32, variant_name: &str) -> Option<u16> {
        self.type_variant_names
            .get(type_id as usize)?
            .iter()
            .position(|n| n == variant_name)
            .map(|i| i as u16)
    }

    /// Global ctor id for `(type_id, variant_id)`; `None` if either is unknown.
    pub fn find_ctor_id(&self, type_id: u32, variant_id: u16) -> Option<u32> {
        self.type_variant_ctor_ids
            .get(type_id as usize)?
            .get(variant_id as usize)
            .copied()
    }

    /// Inverse of `find_ctor_id`: `(type_id, variant_id)` for a ctor id.
    /// Panics if the ctor id was never registered.
    pub fn get_ctor_parts(&self, ctor_id: u32) -> (u32, u16) {
        self.ctor_to_type_variant
            .get(ctor_id as usize)
            .copied()
            .unwrap_or_else(|| panic!("Arena: expected ctor id {} to be registered", ctor_id))
    }
686
    /// Total entry count across the four heap spaces.
    /// Symbol-table entries are not included in this count.
    pub fn len(&self) -> usize {
        self.young_entries.len()
            + self.yard_entries.len()
            + self.handoff_entries.len()
            + self.stable_entries.len()
    }
    /// True when all four heap spaces are empty (symbols not considered,
    /// mirroring `len`).
    pub fn is_empty(&self) -> bool {
        self.young_entries.is_empty()
            && self.yard_entries.is_empty()
            && self.handoff_entries.is_empty()
            && self.stable_entries.is_empty()
    }
699}
700
/// `Default` simply delegates to [`Arena::new`].
impl<T: ArenaTypes> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}