1use super::*;
2
3impl<T: ArenaTypes> Arena<T> {
4 pub fn new() -> Self {
5 Arena {
6 young_entries: Vec::with_capacity(256),
7 yard_entries: Vec::with_capacity(64),
8 handoff_entries: Vec::with_capacity(64),
9 stable_entries: Vec::with_capacity(64),
10 scratch_young: Vec::new(),
11 scratch_yard: Vec::new(),
12 scratch_handoff: Vec::new(),
13 scratch_stable: Vec::new(),
14 peak_usage: ArenaUsage::default(),
15 alloc_space: AllocSpace::Young,
16 type_names: Vec::new(),
17 type_field_names: Vec::new(),
18 type_variant_names: Vec::new(),
19 type_variant_ctor_ids: Vec::new(),
20 ctor_to_type_variant: Vec::new(),
21 symbol_entries: Vec::new(),
22 }
23 }
24
25 pub fn clone_static(&self) -> Self {
30 Arena {
31 young_entries: Vec::with_capacity(64),
32 yard_entries: Vec::new(),
33 handoff_entries: Vec::new(),
34 stable_entries: self.stable_entries.clone(),
35 scratch_young: Vec::new(),
36 scratch_yard: Vec::new(),
37 scratch_handoff: Vec::new(),
38 scratch_stable: Vec::new(),
39 peak_usage: ArenaUsage::default(),
40 alloc_space: AllocSpace::Young,
41 type_names: self.type_names.clone(),
42 type_field_names: self.type_field_names.clone(),
43 type_variant_names: self.type_variant_names.clone(),
44 type_variant_ctor_ids: self.type_variant_ctor_ids.clone(),
45 ctor_to_type_variant: self.ctor_to_type_variant.clone(),
46 symbol_entries: self.symbol_entries.clone(),
47 }
48 }
49
    /// Recursively copies `value` — and everything it references — out of
    /// `source` into this arena, returning the re-rooted value.
    ///
    /// Immediates pass through unchanged, as do symbol values
    /// (`Fn`/`Builtin`/`Namespace`), whose ids index the symbol table rather
    /// than a heap space.
    pub fn deep_import(&mut self, value: NanValue, source: &Arena<T>) -> NanValue {
        // Immediates carry their payload inline; nothing to copy.
        if !value.is_nan_boxed() {
            return value;
        }
        let heap_idx = match value.heap_index() {
            Some(idx) => idx,
            None => return value,
        };

        // Clone the source entry up front so the recursion below can borrow
        // `self` mutably without holding a borrow into `source`'s storage.
        let entry = source.get(heap_idx).clone();
        match entry {
            ArenaEntry::Int(i) => NanValue::new_int(i, self),
            ArenaEntry::String(s) => {
                let idx = self.push(ArenaEntry::String(s));
                NanValue::new_string(idx)
            }
            ArenaEntry::Tuple(items) => {
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push_tuple(imported);
                NanValue::new_tuple(idx)
            }
            ArenaEntry::List(_) => {
                // Flatten the source list first, then import each element;
                // the imported list is always stored in flat form.
                let flat = source.list_to_vec_value(value);
                let imported: Vec<NanValue> =
                    flat.iter().map(|v| self.deep_import(*v, source)).collect();
                if imported.is_empty() {
                    NanValue::EMPTY_LIST
                } else {
                    let rc_items = Rc::new(imported);
                    let idx = self.push(ArenaEntry::List(ArenaList::Flat {
                        items: rc_items,
                        start: 0,
                    }));
                    NanValue::new_list(idx)
                }
            }
            ArenaEntry::Map(map) => {
                // Rebuild the map entry by entry, importing both keys and
                // values. NOTE(review): the stored hashes are copied
                // verbatim — assumes hashing is arena-independent; confirm.
                let mut new_map = T::Map::new();
                for (hash, (k, v)) in map.iter() {
                    let ik = self.deep_import(*k, source);
                    let iv = self.deep_import(*v, source);
                    new_map = new_map.insert(*hash, (ik, iv));
                }
                let idx = self.push(ArenaEntry::Map(new_map));
                NanValue::new_map(idx)
            }
            ArenaEntry::Vector(items) => {
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push(ArenaEntry::Vector(imported));
                NanValue::new_vector(idx)
            }
            ArenaEntry::Record { type_id, fields } => {
                // NOTE(review): `type_id` is copied as-is, which is only
                // valid when both arenas share the same type registry (e.g.
                // one was produced via `clone_static` of the other) — TODO
                // confirm at call sites.
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Record {
                    type_id,
                    fields: imported,
                });
                NanValue::new_record(idx)
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                fields,
            } => {
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields: imported,
                });
                NanValue::new_variant(idx)
            }
            ArenaEntry::Boxed(inner) => {
                let imported = self.deep_import(inner, source);
                let idx = self.push(ArenaEntry::Boxed(imported));
                // Re-encode with the ORIGINAL tag so the boxed value keeps
                // its kind while pointing at the new arena slot.
                NanValue::encode(value.tag(), ARENA_REF_BIT | (idx as u64))
            }
            // Symbol entries are not heap-space slots; their ids are kept
            // as-is. Presumably valid because `clone_static` copies the
            // symbol table — verify for arena pairs created any other way.
            ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => value,
        }
    }
145
146 #[inline]
147 pub fn push(&mut self, entry: ArenaEntry<T>) -> u32 {
148 match &entry {
149 ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => {}
150 _ => {
151 return match self.alloc_space {
152 AllocSpace::Young => {
153 let idx = self.young_entries.len() as u32;
154 self.young_entries.push(entry);
155 self.note_peak_usage();
156 Self::encode_index(HeapSpace::Young, idx)
157 }
158 AllocSpace::Yard => {
159 let idx = self.yard_entries.len() as u32;
160 self.yard_entries.push(entry);
161 self.note_peak_usage();
162 Self::encode_index(HeapSpace::Yard, idx)
163 }
164 AllocSpace::Handoff => {
165 let idx = self.handoff_entries.len() as u32;
166 self.handoff_entries.push(entry);
167 self.note_peak_usage();
168 Self::encode_index(HeapSpace::Handoff, idx)
169 }
170 };
171 }
172 }
173 match entry {
174 ArenaEntry::Fn(f) => self.push_symbol(ArenaSymbol::Fn(f)),
175 ArenaEntry::Builtin(name) => self.push_symbol(ArenaSymbol::Builtin(name)),
176 ArenaEntry::Namespace { name, members } => {
177 self.push_symbol(ArenaSymbol::Namespace { name, members })
178 }
179 _ => unreachable!("non-symbol entry already returned above"),
180 }
181 }
182
183 #[inline]
184 pub fn push_symbol(&mut self, symbol: ArenaSymbol<T>) -> u32 {
185 let idx = self.symbol_entries.len() as u32;
186 self.symbol_entries.push(symbol);
187 idx
188 }
189
190 #[inline]
191 pub fn get(&self, index: u32) -> &ArenaEntry<T> {
192 let (space, raw_index) = Self::decode_index(index);
193 match space {
194 HeapSpace::Young => &self.young_entries[raw_index as usize],
195 HeapSpace::Yard => &self.yard_entries[raw_index as usize],
196 HeapSpace::Handoff => &self.handoff_entries[raw_index as usize],
197 HeapSpace::Stable => &self.stable_entries[raw_index as usize],
198 }
199 }
200
    /// Packs a heap-space tag into the high bits of a raw slot index.
    #[inline]
    pub(crate) fn encode_index(space: HeapSpace, index: u32) -> u32 {
        ((space as u32) << HEAP_SPACE_SHIFT) | index
    }

    /// Convenience wrapper: encode `index` as a yard-space reference.
    #[inline]
    pub(crate) fn encode_yard_index(index: u32) -> u32 {
        Self::encode_index(HeapSpace::Yard, index)
    }

    /// Convenience wrapper: encode `index` as a stable-space reference.
    #[inline]
    pub(crate) fn encode_stable_index(index: u32) -> u32 {
        Self::encode_index(HeapSpace::Stable, index)
    }

    /// Convenience wrapper: encode `index` as a handoff-space reference.
    #[inline]
    pub(crate) fn encode_handoff_index(index: u32) -> u32 {
        Self::encode_index(HeapSpace::Handoff, index)
    }

    /// Splits an encoded index back into (heap space, raw slot index).
    #[inline]
    pub(crate) fn decode_index(index: u32) -> (HeapSpace, u32) {
        // This decoder assumes `HeapSpace` discriminants are
        // Young=0, Yard=1, Handoff=2, Stable=3 — keep in sync with the
        // enum declaration.
        let space = match (index & HEAP_SPACE_MASK_U32) >> HEAP_SPACE_SHIFT {
            0 => HeapSpace::Young,
            1 => HeapSpace::Yard,
            2 => HeapSpace::Handoff,
            3 => HeapSpace::Stable,
            _ => unreachable!("invalid heap space bits"),
        };
        (space, index & HEAP_INDEX_MASK_U32)
    }
232
    /// True if `index` refers to the stable heap space.
    #[inline]
    pub fn is_stable_index(index: u32) -> bool {
        matches!(Self::decode_index(index).0, HeapSpace::Stable)
    }

    /// True if `index` is a yard-space slot allocated at or after `mark`
    /// and still in bounds — i.e. it lies in the region opened at `mark`.
    #[inline]
    pub fn is_yard_index_in_region(&self, index: u32, mark: u32) -> bool {
        let (space, raw_index) = Self::decode_index(index);
        matches!(space, HeapSpace::Yard)
            && raw_index >= mark
            && raw_index < self.yard_entries.len() as u32
    }

    /// Handoff-space analogue of [`Self::is_yard_index_in_region`].
    #[inline]
    pub fn is_handoff_index_in_region(&self, index: u32, mark: u32) -> bool {
        let (space, raw_index) = Self::decode_index(index);
        matches!(space, HeapSpace::Handoff)
            && raw_index >= mark
            && raw_index < self.handoff_entries.len() as u32
    }

    /// Young-space analogue of [`Self::is_yard_index_in_region`].
    #[inline]
    pub fn is_young_index_in_region(&self, index: u32, mark: u32) -> bool {
        let (space, raw_index) = Self::decode_index(index);
        matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
    }
261
    /// Number of entries currently in the young space.
    #[inline]
    pub fn young_len(&self) -> usize {
        self.young_entries.len()
    }

    /// Number of entries currently in the yard space.
    #[inline]
    pub fn yard_len(&self) -> usize {
        self.yard_entries.len()
    }

    /// Number of entries currently in the handoff space.
    #[inline]
    pub fn handoff_len(&self) -> usize {
        self.handoff_entries.len()
    }

    /// Number of entries currently in the stable space.
    #[inline]
    pub fn stable_len(&self) -> usize {
        self.stable_entries.len()
    }

    /// Snapshot of the current per-space entry counts.
    #[inline]
    pub fn usage(&self) -> ArenaUsage {
        ArenaUsage {
            young: self.young_entries.len(),
            yard: self.yard_entries.len(),
            handoff: self.handoff_entries.len(),
            stable: self.stable_entries.len(),
        }
    }

    /// High-water marks accumulated by [`Self::note_peak_usage`].
    #[inline]
    pub fn peak_usage(&self) -> ArenaUsage {
        self.peak_usage
    }

    /// Folds the current usage into the per-space high-water marks.
    /// Invoked after every heap push (see `push`).
    #[inline]
    pub(crate) fn note_peak_usage(&mut self) {
        let usage = self.usage();
        self.peak_usage.young = self.peak_usage.young.max(usage.young);
        self.peak_usage.yard = self.peak_usage.yard.max(usage.yard);
        self.peak_usage.handoff = self.peak_usage.handoff.max(usage.handoff);
        self.peak_usage.stable = self.peak_usage.stable.max(usage.stable);
    }
305
306 #[inline]
307 pub(crate) fn take_u32_scratch(slot: &mut Vec<u32>, len: usize) -> Vec<u32> {
308 let mut scratch = core::mem::take(slot);
309 scratch.clear();
310 scratch.resize(len, u32::MAX);
311 scratch
312 }
313
314 #[inline]
315 pub(crate) fn recycle_u32_scratch(slot: &mut Vec<u32>, mut scratch: Vec<u32>) {
316 scratch.clear();
317 *slot = scratch;
318 }
319
    /// True if `index` was allocated after the given per-space marks, i.e.
    /// it belongs to the current frame's region in any of the three
    /// transient spaces. Stable-space indices are never frame-local.
    #[inline]
    pub fn is_frame_local_index(
        &self,
        index: u32,
        arena_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
    ) -> bool {
        self.is_young_index_in_region(index, arena_mark)
            || self.is_yard_index_in_region(index, yard_mark)
            || self.is_handoff_index_in_region(index, handoff_mark)
    }
332
    /// Runs `f` with the arena's allocation target temporarily switched to
    /// `space`, restoring the previous target afterwards.
    ///
    /// NOTE(review): not unwind-safe — if `f` panics, `alloc_space` is left
    /// set to `space` (no drop guard). Fine if panics abort in this build;
    /// confirm for unwinding configurations.
    pub fn with_alloc_space<R>(
        &mut self,
        space: AllocSpace,
        f: impl FnOnce(&mut Arena<T>) -> R,
    ) -> R {
        let prev = self.alloc_space;
        self.alloc_space = space;
        let out = f(self);
        self.alloc_space = prev;
        out
    }
344
    /// Stores an integer heap entry; returns its encoded index.
    pub fn push_i64(&mut self, val: i64) -> u32 {
        self.push(ArenaEntry::Int(val))
    }

    /// Stores a string (copied into an `Rc<str>`); returns its encoded index.
    pub fn push_string(&mut self, s: &str) -> u32 {
        self.push(ArenaEntry::String(Rc::from(s)))
    }

    /// Wraps `val` in a `Boxed` cell; returns its encoded index.
    pub fn push_boxed(&mut self, val: NanValue) -> u32 {
        self.push(ArenaEntry::Boxed(val))
    }

    /// Stores a record with the given registered `type_id` and field values.
    pub fn push_record(&mut self, type_id: u32, fields: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Record { type_id, fields })
    }

    /// Stores a sum-type variant payload.
    pub fn push_variant(&mut self, type_id: u32, variant_id: u16, fields: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Variant {
            type_id,
            variant_id,
            fields,
        })
    }

    /// Stores `items` as a flat list starting at offset 0.
    pub fn push_list(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::List(ArenaList::Flat {
            items: Rc::new(items),
            start: 0,
        }))
    }

    /// Stores a map heap entry.
    pub fn push_map(&mut self, map: T::Map) -> u32 {
        self.push(ArenaEntry::Map(map))
    }

    /// Stores a tuple heap entry.
    pub fn push_tuple(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Tuple(items))
    }

    /// Stores a vector heap entry.
    pub fn push_vector(&mut self, items: Vec<NanValue>) -> u32 {
        self.push(ArenaEntry::Vector(items))
    }

    /// Adds a function to the symbol table (not a heap space).
    pub fn push_fn(&mut self, f: Rc<T::Fn>) -> u32 {
        self.push_symbol(ArenaSymbol::Fn(f))
    }

    /// Adds a builtin, by name, to the symbol table.
    pub fn push_builtin(&mut self, name: &str) -> u32 {
        self.push_symbol(ArenaSymbol::Builtin(Rc::from(name)))
    }

    /// Adds a nullary-variant constructor symbol to the symbol table.
    pub fn push_nullary_variant_symbol(&mut self, ctor_id: u32) -> u32 {
        self.push_symbol(ArenaSymbol::NullaryVariant { ctor_id })
    }
390
    /// Reads the integer at `index`.
    ///
    /// # Panics
    /// Panics if the entry is not an `Int`. (All `get_*` accessors below
    /// panic likewise on a kind mismatch.)
    pub fn get_i64(&self, index: u32) -> i64 {
        match self.get(index) {
            ArenaEntry::Int(i) => *i,
            _ => panic!("Arena: expected Int at {}", index),
        }
    }

    /// Borrows the string at `index`.
    pub fn get_string(&self, index: u32) -> &str {
        match self.get(index) {
            ArenaEntry::String(s) => s,
            other => panic!("Arena: expected String at {} but found {:?}", index, other),
        }
    }

    /// Resolves a string value: small strings decode inline from the NaN
    /// box; otherwise the heap entry is borrowed.
    pub fn get_string_value(&self, value: NanValue) -> NanString<'_> {
        if let Some(s) = value.small_string() {
            s
        } else {
            NanString::Borrowed(self.get_string(value.arena_index()))
        }
    }

    /// Reads the value inside the `Boxed` cell at `index`.
    pub fn get_boxed(&self, index: u32) -> NanValue {
        match self.get(index) {
            ArenaEntry::Boxed(v) => *v,
            _ => panic!("Arena: expected Boxed at {}", index),
        }
    }

    /// Borrows the record at `index` as `(type_id, fields)`.
    pub fn get_record(&self, index: u32) -> (u32, &[NanValue]) {
        match self.get(index) {
            ArenaEntry::Record { type_id, fields } => (*type_id, fields),
            _ => panic!("Arena: expected Record at {}", index),
        }
    }

    /// Borrows the variant at `index` as `(type_id, variant_id, fields)`.
    pub fn get_variant(&self, index: u32) -> (u32, u16, &[NanValue]) {
        match self.get(index) {
            ArenaEntry::Variant {
                type_id,
                variant_id,
                fields,
            } => (*type_id, *variant_id, fields),
            other => panic!("Arena: expected Variant at {} but found {:?}", index, other),
        }
    }

    /// Borrows the list node at `index`.
    pub fn get_list(&self, index: u32) -> &ArenaList {
        match self.get(index) {
            ArenaEntry::List(items) => items,
            _ => panic!("Arena: expected List at {}", index),
        }
    }

    /// Borrows the tuple elements at `index`.
    pub fn get_tuple(&self, index: u32) -> &[NanValue] {
        match self.get(index) {
            ArenaEntry::Tuple(items) => items,
            _ => panic!("Arena: expected Tuple at {}", index),
        }
    }

    /// Borrows the vector elements at `index`.
    pub fn get_vector(&self, index: u32) -> &[NanValue] {
        match self.get(index) {
            ArenaEntry::Vector(items) => items,
            _ => panic!("Arena: expected Vector at {}", index),
        }
    }

    /// Borrows a vector value's elements; the empty-vector immediate maps
    /// to the empty slice without touching the heap.
    pub fn vector_ref_value(&self, value: NanValue) -> &[NanValue] {
        if value.is_empty_vector_immediate() {
            return &[];
        }
        self.get_vector(value.arena_index())
    }

    /// Owned copy of a vector value's elements; the empty-vector immediate
    /// yields a fresh empty `Vec`.
    pub fn clone_vector_value(&self, value: NanValue) -> Vec<NanValue> {
        if value.is_empty_vector_immediate() {
            Vec::new()
        } else {
            self.get_vector(value.arena_index()).to_vec()
        }
    }

    /// Borrows the map at `index`.
    pub fn get_map(&self, index: u32) -> &T::Map {
        match self.get(index) {
            ArenaEntry::Map(map) => map,
            _ => panic!("Arena: expected Map at {}", index),
        }
    }
    /// Borrows the map behind `map`; the empty-map immediate resolves to a
    /// lazily created, intentionally leaked shared empty map.
    pub fn map_ref_value(&self, map: NanValue) -> &T::Map {
        if map.is_empty_map_immediate() {
            use core::sync::atomic::{AtomicPtr, Ordering as AtomicOrdering};
            // Cache a single leaked empty map and hand out 'static borrows.
            //
            // NOTE(review): a `static` inside a generic fn is NOT
            // monomorphized — this one pointer is shared by EVERY `T::Map`
            // instantiation. If `Arena` is ever instantiated with two
            // different map types, the second reinterprets the first's
            // pointer as the wrong type (type-confusion UB). Confirm the
            // crate only ever uses one `ArenaTypes::Map`, or key this cache
            // by `TypeId`.
            static EMPTY_MAP_PTR: AtomicPtr<()> = AtomicPtr::new(core::ptr::null_mut());

            let ptr = EMPTY_MAP_PTR.load(AtomicOrdering::Acquire);
            if !ptr.is_null() {
                // SAFETY (as written): the pointer was produced below from a
                // leaked `Box<T::Map>` — but see the NOTE above about
                // cross-instantiation sharing.
                return unsafe { &*(ptr as *const T::Map) };
            }
            // Racing threads may each leak one empty map here; only the
            // last store wins. The extra leak is one-time and small.
            let boxed = alloc::boxed::Box::new(T::Map::new());
            let leaked: &'static T::Map = alloc::boxed::Box::leak(boxed);
            let new_ptr = leaked as *const T::Map as *mut ();
            EMPTY_MAP_PTR.store(new_ptr, AtomicOrdering::Release);
            leaked
        } else {
            self.get_map(map.arena_index())
        }
    }

    /// Owned copy of the map behind `map`; the empty-map immediate yields a
    /// fresh empty map.
    pub fn clone_map_value(&self, map: NanValue) -> T::Map {
        if map.is_empty_map_immediate() {
            T::Map::new()
        } else {
            self.get_map(map.arena_index()).clone()
        }
    }
    /// Borrows the function stored in the symbol table at `index`.
    ///
    /// # Panics
    /// Panics if the symbol is not a `Fn`. (The symbol getters below panic
    /// likewise on a kind mismatch.)
    pub fn get_fn(&self, index: u32) -> &T::Fn {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Fn(f) => f,
            _ => panic!("Arena: expected Fn symbol at {}", index),
        }
    }

    /// Like [`Self::get_fn`] but borrows the `Rc` itself so callers can
    /// cheaply clone a shared handle.
    pub fn get_fn_rc(&self, index: u32) -> &Rc<T::Fn> {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Fn(f) => f,
            _ => panic!("Arena: expected Fn symbol at {}", index),
        }
    }

    /// Borrows the builtin name at symbol `index`.
    pub fn get_builtin(&self, index: u32) -> &str {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Builtin(s) => s,
            _ => panic!("Arena: expected Builtin symbol at {}", index),
        }
    }

    /// Borrows the namespace at symbol `index` as `(name, members)`.
    pub fn get_namespace(&self, index: u32) -> (&str, &[(Rc<str>, NanValue)]) {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::Namespace { name, members } => (name, members),
            _ => panic!("Arena: expected Namespace symbol at {}", index),
        }
    }

    /// Reads the constructor id of the nullary-variant symbol at `index`.
    pub fn get_nullary_variant_ctor(&self, index: u32) -> u32 {
        match &self.symbol_entries[index as usize] {
            ArenaSymbol::NullaryVariant { ctor_id } => *ctor_id,
            _ => panic!("Arena: expected NullaryVariant symbol at {}", index),
        }
    }
530
    /// Registers a new record type and returns its type id — the shared
    /// index into the parallel `type_*` tables.
    pub fn register_record_type(&mut self, name: &str, field_names: Vec<String>) -> u32 {
        let id = self.type_names.len() as u32;
        self.type_names.push(String::from(name));
        self.type_field_names.push(field_names);
        // Records have no variants; push empty rows to keep the parallel
        // tables aligned with `type_names`.
        self.type_variant_names.push(Vec::new());
        self.type_variant_ctor_ids.push(Vec::new());
        id
    }

    /// Registers a new sum type with the given variants, allocating one
    /// global constructor id per variant; returns the type id.
    pub fn register_sum_type(&mut self, name: &str, variant_names: Vec<String>) -> u32 {
        let id = self.type_names.len() as u32;
        self.type_names.push(String::from(name));
        self.type_field_names.push(Vec::new());
        // Constructor ids are globally sequential; each maps back to a
        // (type id, variant index) pair via `ctor_to_type_variant`.
        let ctor_ids: Vec<u32> = (0..variant_names.len())
            .map(|variant_idx| {
                let ctor_id = self.ctor_to_type_variant.len() as u32;
                self.ctor_to_type_variant.push((id, variant_idx as u16));
                ctor_id
            })
            .collect();
        self.type_variant_names.push(variant_names);
        self.type_variant_ctor_ids.push(ctor_ids);
        id
    }

    /// Appends one variant to an already-registered type, allocating its
    /// global constructor id; returns the new variant id within that type.
    ///
    /// # Panics
    /// Panics if `type_id` was never registered (out-of-bounds indexing).
    pub fn register_variant_name(&mut self, type_id: u32, variant_name: String) -> u16 {
        let variants = &mut self.type_variant_names[type_id as usize];
        let variant_id = variants.len() as u16;
        variants.push(variant_name);

        let ctor_id = self.ctor_to_type_variant.len() as u32;
        self.ctor_to_type_variant.push((type_id, variant_id));
        self.type_variant_ctor_ids[type_id as usize].push(ctor_id);

        variant_id
    }
569
    /// Name of the type registered under `type_id` (panics if unregistered).
    pub fn get_type_name(&self, type_id: u32) -> &str {
        &self.type_names[type_id as usize]
    }

    /// Number of registered types.
    pub fn type_count(&self) -> u32 {
        self.type_names.len() as u32
    }

    /// Field names of the record type `type_id` (empty for sum types).
    pub fn get_field_names(&self, type_id: u32) -> &[String] {
        &self.type_field_names[type_id as usize]
    }

    /// Name of variant `variant_id` within type `type_id`.
    pub fn get_variant_name(&self, type_id: u32, variant_id: u16) -> &str {
        &self.type_variant_names[type_id as usize][variant_id as usize]
    }

    /// Looks up a type id by name. Linear scan — O(number of types).
    pub fn find_type_id(&self, name: &str) -> Option<u32> {
        self.type_names
            .iter()
            .position(|n| n == name)
            .map(|i| i as u32)
    }

    /// Looks up a variant id by name within `type_id`; `None` if either the
    /// type or the variant is unknown. Linear scan over the variants.
    pub fn find_variant_id(&self, type_id: u32, variant_name: &str) -> Option<u16> {
        self.type_variant_names
            .get(type_id as usize)?
            .iter()
            .position(|n| n == variant_name)
            .map(|i| i as u16)
    }

    /// Global constructor id for `(type_id, variant_id)`, if registered.
    pub fn find_ctor_id(&self, type_id: u32, variant_id: u16) -> Option<u32> {
        self.type_variant_ctor_ids
            .get(type_id as usize)?
            .get(variant_id as usize)
            .copied()
    }

    /// Resolves a constructor id back to its `(type_id, variant_id)` pair.
    ///
    /// # Panics
    /// Panics if `ctor_id` was never registered.
    pub fn get_ctor_parts(&self, ctor_id: u32) -> (u32, u16) {
        self.ctor_to_type_variant
            .get(ctor_id as usize)
            .copied()
            .unwrap_or_else(|| panic!("Arena: expected ctor id {} to be registered", ctor_id))
    }
609
610 pub fn len(&self) -> usize {
611 self.young_entries.len()
612 + self.yard_entries.len()
613 + self.handoff_entries.len()
614 + self.stable_entries.len()
615 }
616 pub fn is_empty(&self) -> bool {
617 self.young_entries.is_empty()
618 && self.yard_entries.is_empty()
619 && self.handoff_entries.is_empty()
620 && self.stable_entries.is_empty()
621 }
622}
623
impl<T: ArenaTypes> Default for Arena<T> {
    /// Equivalent to [`Arena::new`].
    fn default() -> Self {
        Self::new()
    }
}