// aver_memory/lib.rs
1//! NaN-boxed compact Value representation (8 bytes per value).
2//!
3//! Layout: every value is a `u64` interpreted as an IEEE 754 `f64`.
4//!
5//! - **Float**: any f64 that is NOT a quiet NaN with our marker -> stored directly.
6//! - **Everything else**: encoded as a quiet NaN with tag + payload in the mantissa.
7//!
8//! IEEE 754 quiet NaN: exponent=0x7FF (all 1s), quiet bit=1, plus our marker bit.
9//! We use `0x7FFC` as 14-bit prefix (bits 63-50), leaving bits 49-0 free.
10//!
11//! ```text
12//! 63      50 49  46 45                    0
13//! +--------+------+------------------------+
14//! | 0x7FFC | tag  |       payload          |
15//! | 14 bits| 4 bit|       46 bits          |
16//! +--------+------+------------------------+
17//! ```
18//!
19//! Tag map:
20//!   0  = Immediate       payload 0-2: false/true/unit
21//!   1  = Symbol          payload bits 0-1: fn/builtin/namespace/nullary-variant; rest=symbol index
22//!   2  = Int             payload bit45: 0=inline(45-bit signed), 1=arena index
23//!   3  = String          payload bit45: 0=inline small string (len + 5 bytes), 1=arena index
24//!   4  = Some            payload bit45: 0=inline inner, 1=arena index
25//!   5  = None            singleton
26//!   6  = Ok              payload bit45: 0=inline inner, 1=arena index
27//!   7  = Err             payload bit45: 0=inline inner, 1=arena index
28//!   8  = List            payload bit45: 0=empty list, 1=arena index
29//!   9  = Vector          payload bit45: 0=empty vector, 1=arena index
30//!   10 = Map             payload bit45: 0=empty map, 1=arena index
31//!   11 = Record          payload bit45: 1=arena index
32//!   12 = Variant         payload bit45: 1=arena index
33//!   13 = Tuple           payload bit45: 1=arena index
//!   14 = InlineVariant  [45:30]=ctor_id, [29]=kind(0=int,1=imm), [28:0]=int value or [28:27]=imm code
35//!   15 = (reserved)
36
37#![cfg_attr(not(feature = "std"), no_std)]
38
39extern crate alloc;
40
41use alloc::format;
42use alloc::string::String;
43use alloc::sync::Arc as Rc;
44use alloc::vec::Vec;
45use core::cmp::Ordering;
46use core::hash::{Hash, Hasher};
47use core::ops::Deref;
48
49// ---------------------------------------------------------------------------
50// ArenaTypes trait — parameterises the arena over consumer-specific types
51// ---------------------------------------------------------------------------
52
/// Trait that defines the function and map types used by the arena.
///
/// The arena stores function values and persistent maps, but their concrete
/// types depend on the consumer (VM, WASM runtime, codegen, etc.).
pub trait ArenaTypes: Clone + core::fmt::Debug + 'static {
    /// The function value type (e.g. `FunctionValue` in the VM).
    type Fn: Clone + core::fmt::Debug + FnValueName;
    /// The persistent map type (e.g. `AverMap<u64, (NanValue, NanValue)>`).
    type Map: Clone + core::fmt::Debug + MapLike;
}

/// Trait for extracting a display name from a function value.
pub trait FnValueName {
    /// The function's display name.
    fn name(&self) -> &str;
}

/// Trait abstracting the persistent-map operations needed by the arena.
///
/// Implementors provide a hash-keyed map from `u64` to `(NanValue, NanValue)`.
/// Note `insert` takes `&self` and returns `Self` — the map is persistent,
/// not mutated in place.
pub trait MapLike: Sized {
    /// An empty map.
    fn new() -> Self;
    /// Look up the entry stored under `key`.
    fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)>;
    /// A new map with `key` bound to `value` (persistent insert).
    fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self;
    /// Number of entries.
    fn len(&self) -> usize;
    /// True when the map holds no entries.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Iterate over `(key, entry)` pairs.
    fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))>;
    /// Iterate over stored entries only.
    fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)>;
}
83
84// ---------------------------------------------------------------------------
85// Bit layout constants
86// ---------------------------------------------------------------------------
87
/// Boxing prefix: sign=0, exponent all ones, top two mantissa bits set
/// (the 14-bit `0x7FFC` prefix from the module docs).
const QNAN: u64 = 0x7FFC_0000_0000_0000;
/// Mask for the sign bit plus the prefix; a value is boxed iff
/// `bits & QNAN_MASK == QNAN`. Negative NaNs (sign=1) therefore read as floats.
const QNAN_MASK: u64 = 0xFFFC_0000_0000_0000;
/// The 4-bit tag sits at bits 49:46.
const TAG_SHIFT: u32 = 46;
const TAG_MASK: u64 = 0xF;
/// Payload occupies bits 45:0.
const PAYLOAD_MASK: u64 = (1u64 << 46) - 1;

// Tag values — see the module-level tag map.
pub const TAG_IMMEDIATE: u64 = 0;
pub const TAG_SYMBOL: u64 = 1;
pub const TAG_INT: u64 = 2;
pub const TAG_STRING: u64 = 3;
pub const TAG_SOME: u64 = 4;
pub const TAG_NONE: u64 = 5;
pub const TAG_OK: u64 = 6;
pub const TAG_ERR: u64 = 7;
pub const TAG_LIST: u64 = 8;
pub const TAG_VECTOR: u64 = 9;
pub const TAG_MAP: u64 = 10;
pub const TAG_RECORD: u64 = 11;
pub const TAG_VARIANT: u64 = 12;
pub const TAG_TUPLE: u64 = 13;
pub const TAG_INLINE_VARIANT: u64 = 14;

// Symbol sub-kinds, stored in payload bits 1:0; the symbol index fills the rest.
pub const SYMBOL_FN: u64 = 0;
pub const SYMBOL_BUILTIN: u64 = 1;
pub const SYMBOL_NAMESPACE: u64 = 2;
pub const SYMBOL_NULLARY_VARIANT: u64 = 3;
const SYMBOL_KIND_MASK: u64 = 0b11;

// Immediate payloads for TAG_IMMEDIATE.
pub const IMM_FALSE: u64 = 0;
pub const IMM_TRUE: u64 = 1;
pub const IMM_UNIT: u64 = 2;

// Wrapper (Some/Ok/Err) kind codes plus the inline-wrapper sub-layout:
// when bit 45 (ARENA_REF_BIT) is clear, bits 44:43 say what is stored inline
// and bits 42:0 carry it.
pub const WRAP_SOME: u64 = 0;
pub const WRAP_OK: u64 = 1;
pub const WRAP_ERR: u64 = 2;
const WRAPPER_INLINE_KIND_SHIFT: u32 = 43;
const WRAPPER_INLINE_KIND_MASK: u64 = 0b11 << WRAPPER_INLINE_KIND_SHIFT;
const WRAPPER_INLINE_PAYLOAD_MASK: u64 = (1u64 << WRAPPER_INLINE_KIND_SHIFT) - 1;
const WRAPPER_INLINE_IMMEDIATE: u64 = 0;
const WRAPPER_INLINE_INT: u64 = 1;
const WRAPPER_INLINE_NONE: u64 = 2;
// Inline int inside a wrapper: 43-bit two's complement (sign bit 42).
const WRAPPER_INT_INLINE_MASK: u64 = WRAPPER_INLINE_PAYLOAD_MASK;
const WRAPPER_INT_INLINE_MAX: i64 = (1i64 << 42) - 1;
const WRAPPER_INT_INLINE_MIN: i64 = -(1i64 << 42);

/// Bit 45 of the payload: set when the payload is an arena index.
pub const ARENA_REF_BIT: u64 = 1u64 << 45;
const INT_BIG_BIT: u64 = ARENA_REF_BIT;
// Inline int: 45-bit two's complement (sign bit 44).
const INT_INLINE_MASK: u64 = (1u64 << 45) - 1;
pub const INT_INLINE_MAX: i64 = (1i64 << 44) - 1;
pub const INT_INLINE_MIN: i64 = -(1i64 << 44);

const STRING_ARENA_BIT: u64 = ARENA_REF_BIT;
// Inline small string: 3-bit length at bits 42:40, bytes at bits 39:0.
const STRING_INLINE_LEN_SHIFT: u32 = 40;
const STRING_INLINE_LEN_MASK: u64 = 0b111 << STRING_INLINE_LEN_SHIFT;
const STRING_INLINE_MAX_BYTES: usize = 5;

// -- Inline variant layout --------------------------------------------------
// ctor_id at bits 45:30, kind flag at bit 29 (0=int, 1=immediate),
// then either a 29-bit int (sign bit 28) or a 2-bit immediate code at 28:27.
const IV_CTOR_SHIFT: u32 = 30;
const IV_CTOR_MASK: u64 = 0xFFFF;
const IV_KIND_BIT: u64 = 1 << 29;
const IV_INT_MASK: u64 = (1u64 << 29) - 1;
const IV_INT_SIGN_BIT: u64 = 1u64 << 28;
const IV_INT_MAX: i64 = (1i64 << 28) - 1;
const IV_INT_MIN: i64 = -(1i64 << 28);
const IV_IMM_SHIFT: u32 = 27;
const IV_IMM_FALSE: u64 = 0;
const IV_IMM_TRUE: u64 = 1;
const IV_IMM_UNIT: u64 = 2;
const IV_IMM_NONE: u64 = 3;
157
158// ---------------------------------------------------------------------------
159// NanValue - the 8-byte compact value
160// ---------------------------------------------------------------------------
161
/// A NaN-boxed value: 8 bytes holding either a raw `f64` or a tagged,
/// payload-carrying quiet NaN (see the module docs for the bit layout).
#[derive(Clone, Copy)]
pub struct NanValue(u64);

/// A string read out of a `NanValue`: either borrowed storage or a small
/// string decoded inline from the payload bits.
#[derive(Clone, Copy, Debug)]
pub enum NanString<'a> {
    /// String data that lives elsewhere (e.g. in the arena).
    Borrowed(&'a str),
    /// Up to `STRING_INLINE_MAX_BYTES` bytes of UTF-8 stored directly.
    Inline {
        /// Number of valid bytes in `bytes`.
        len: u8,
        /// Inline storage; only `bytes[..len]` is meaningful.
        bytes: [u8; STRING_INLINE_MAX_BYTES],
    },
}
173
174impl<'a> NanString<'a> {
175    #[inline]
176    pub fn as_str(&self) -> &str {
177        match self {
178            NanString::Borrowed(s) => s,
179            NanString::Inline { len, bytes } => core::str::from_utf8(&bytes[..*len as usize])
180                .expect("NanString inline payload must be valid UTF-8"),
181        }
182    }
183}
184
185impl Deref for NanString<'_> {
186    type Target = str;
187
188    #[inline]
189    fn deref(&self) -> &Self::Target {
190        self.as_str()
191    }
192}
193
194impl PartialEq for NanString<'_> {
195    #[inline]
196    fn eq(&self, other: &Self) -> bool {
197        self.as_str() == other.as_str()
198    }
199}
200
201impl Eq for NanString<'_> {}
202
203impl PartialEq<&str> for NanString<'_> {
204    #[inline]
205    fn eq(&self, other: &&str) -> bool {
206        self.as_str() == *other
207    }
208}
209
210impl PartialEq<NanString<'_>> for &str {
211    #[inline]
212    fn eq(&self, other: &NanString<'_>) -> bool {
213        *self == other.as_str()
214    }
215}
216
217impl PartialOrd for NanString<'_> {
218    #[inline]
219    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
220        Some(self.cmp(other))
221    }
222}
223
224impl Ord for NanString<'_> {
225    #[inline]
226    fn cmp(&self, other: &Self) -> Ordering {
227        self.as_str().cmp(other.as_str())
228    }
229}
230
231impl Hash for NanString<'_> {
232    #[inline]
233    fn hash<H: Hasher>(&self, state: &mut H) {
234        self.as_str().hash(state);
235    }
236}
237
238impl core::fmt::Display for NanString<'_> {
239    #[inline]
240    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
241        f.write_str(self.as_str())
242    }
243}
244
245// -- Encoding / decoding ---------------------------------------------------
246
247impl NanValue {
    /// Sign-extend a 45-bit inline int payload (sign bit is bit 44).
    #[inline]
    fn decode_inline_int_payload(payload: u64) -> i64 {
        debug_assert!(payload & INT_BIG_BIT == 0);
        let raw = payload & INT_INLINE_MASK;
        if raw & (1u64 << 44) != 0 {
            // Negative: fill bits 63:45 with ones before reinterpreting.
            (raw | !INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Assemble a boxed value from a 4-bit tag and a 46-bit payload.
    #[inline]
    pub fn encode(tag: u64, payload: u64) -> Self {
        debug_assert!(tag <= TAG_MASK);
        debug_assert!(payload <= PAYLOAD_MASK);
        NanValue(QNAN | (tag << TAG_SHIFT) | payload)
    }

    /// True when the bits carry a tag + payload rather than a plain float.
    #[inline]
    pub fn is_nan_boxed(self) -> bool {
        (self.0 & QNAN_MASK) == QNAN
    }

    /// The 4-bit type tag (meaningful only when `is_nan_boxed()`).
    #[inline]
    pub fn tag(self) -> u64 {
        (self.0 >> TAG_SHIFT) & TAG_MASK
    }

    /// The 46-bit payload (meaningful only when `is_nan_boxed()`).
    #[inline]
    pub fn payload(self) -> u64 {
        self.0 & PAYLOAD_MASK
    }
280
281    // -- Constructors ------------------------------------------------------
282
283    #[inline]
284    pub fn new_float(f: f64) -> Self {
285        let bits = f.to_bits();
286        if (bits & QNAN_MASK) == QNAN {
287            NanValue(bits ^ 1)
288        } else {
289            NanValue(bits)
290        }
291    }
292
    /// Reinterpret the raw bits as a float. Meaningful only for float values;
    /// boxed values come back as (quiet) NaNs.
    #[inline]
    pub fn as_float(self) -> f64 {
        f64::from_bits(self.0)
    }

    /// Encode an int that fits the 45-bit inline range directly in the payload.
    #[inline]
    pub fn new_int_inline(i: i64) -> Self {
        debug_assert!((INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i));
        let payload = (i as u64) & INT_INLINE_MASK;
        Self::encode(TAG_INT, payload)
    }

    /// Encode an int stored in the arena's i64 table at `arena_index`.
    #[inline]
    pub fn new_int_arena(arena_index: u32) -> Self {
        Self::encode(TAG_INT, INT_BIG_BIT | (arena_index as u64))
    }

    /// Encode any i64, spilling to the arena when it exceeds the inline range.
    #[inline]
    pub fn new_int<T: ArenaTypes>(i: i64, arena: &mut Arena<T>) -> Self {
        if (INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i) {
            Self::new_int_inline(i)
        } else {
            let idx = arena.push_i64(i);
            Self::new_int_arena(idx)
        }
    }

    /// Decode an int, following the arena reference when bit 45 is set.
    #[inline]
    pub fn as_int<T: ArenaTypes>(self, arena: &Arena<T>) -> i64 {
        let p = self.payload();
        if p & INT_BIG_BIT != 0 {
            let idx = (p & !INT_BIG_BIT) as u32;
            arena.get_i64(idx)
        } else {
            Self::decode_inline_int_payload(p)
        }
    }

    /// Raw payload of an inline int, or `None` for anything else
    /// (floats, other tags, arena-backed ints).
    #[inline]
    pub fn inline_int_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_INT && self.payload() & INT_BIG_BIT == 0)
            .then_some(self.payload())
    }

    /// Decoded value of an inline int, or `None` for anything else.
    #[inline]
    pub fn inline_int_value(self) -> Option<i64> {
        self.inline_int_payload()
            .map(Self::decode_inline_int_payload)
    }
342
343    // -- Immediates --------------------------------------------------------
344
    // Singleton encodings. `EMPTY_*` collection constants have ARENA_REF_BIT
    // clear, marking "empty" without any arena allocation; `EMPTY_STRING` is
    // the inline small string of length 0.
    pub const FALSE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_FALSE);
    pub const TRUE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_TRUE);
    pub const UNIT: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_UNIT);
    pub const NONE: NanValue = NanValue(QNAN | (TAG_NONE << TAG_SHIFT));
    pub const EMPTY_LIST: NanValue = NanValue(QNAN | (TAG_LIST << TAG_SHIFT));
    pub const EMPTY_MAP: NanValue = NanValue(QNAN | (TAG_MAP << TAG_SHIFT));
    pub const EMPTY_VECTOR: NanValue = NanValue(QNAN | (TAG_VECTOR << TAG_SHIFT));
    pub const EMPTY_STRING: NanValue = NanValue(QNAN | (TAG_STRING << TAG_SHIFT));

    /// Encode a boolean as one of the two immediate singletons.
    #[inline]
    pub fn new_bool(b: bool) -> Self {
        if b { Self::TRUE } else { Self::FALSE }
    }

    /// True only for the exact `TRUE` encoding; every other value (including
    /// non-booleans) reads as false — callers gate on `is_bool()` first.
    #[inline]
    pub fn as_bool(self) -> bool {
        self.0 == Self::TRUE.0
    }

    /// Payload of a plain immediate (false/true/unit), or `None` otherwise.
    #[inline]
    pub fn plain_immediate_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_IMMEDIATE && self.payload() <= IMM_UNIT)
            .then_some(self.payload())
    }
369
370    #[inline]
371    pub fn wrapper_kind(self) -> u64 {
372        match self.tag() {
373            TAG_SOME => WRAP_SOME,
374            TAG_OK => WRAP_OK,
375            TAG_ERR => WRAP_ERR,
376            _ => panic!("wrapper_kind() called on non-wrapper"),
377        }
378    }
379
    /// The 2-bit inline kind (bits 44:43) of an inline wrapper, or `None`
    /// when this is not an inline `Some`/`Ok`/`Err`.
    #[inline]
    fn wrapper_inline_kind(self) -> Option<u64> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT == 0 => {
                Some((self.payload() & WRAPPER_INLINE_KIND_MASK) >> WRAPPER_INLINE_KIND_SHIFT)
            }
            _ => None,
        }
    }

    /// Sign-extend a 43-bit wrapper-inline int (sign bit is bit 42).
    #[inline]
    fn decode_wrapper_inline_int_payload(payload: u64) -> i64 {
        let raw = payload & WRAPPER_INT_INLINE_MASK;
        if raw & (1u64 << 42) != 0 {
            (raw | !WRAPPER_INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Truncate an in-range i64 to the 43-bit wrapper-inline representation.
    #[inline]
    fn encode_wrapper_inline_int(i: i64) -> u64 {
        debug_assert!((WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&i));
        (i as u64) & WRAPPER_INT_INLINE_MASK
    }

    /// Reconstruct the inner value of an inline wrapper, or `None` when this
    /// is not an inline wrapper (or carries an unknown inline kind).
    #[inline]
    pub fn wrapper_inline_inner(self) -> Option<NanValue> {
        let kind = self.wrapper_inline_kind()?;
        let payload = self.payload() & WRAPPER_INLINE_PAYLOAD_MASK;
        match kind {
            WRAPPER_INLINE_IMMEDIATE => Some(Self::encode(TAG_IMMEDIATE, payload)),
            WRAPPER_INLINE_INT => Some(Self::new_int_inline(
                Self::decode_wrapper_inline_int_payload(payload),
            )),
            WRAPPER_INLINE_NONE => Some(Self::NONE),
            _ => None,
        }
    }

    /// Build an inline (non-arena) wrapper word from kind + payload bits.
    #[inline]
    fn new_inline_wrapper(tag: u64, inline_kind: u64, payload: u64) -> Self {
        debug_assert!(matches!(tag, TAG_SOME | TAG_OK | TAG_ERR));
        debug_assert!(inline_kind <= WRAPPER_INLINE_NONE);
        debug_assert!(payload <= WRAPPER_INLINE_PAYLOAD_MASK);
        Self::encode(tag, (inline_kind << WRAPPER_INLINE_KIND_SHIFT) | payload)
    }
430
431    #[inline]
432    pub fn wrapper_parts<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<(u64, NanValue)> {
433        if !self.is_nan_boxed() {
434            return None;
435        }
436        match self.tag() {
437            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT != 0 => {
438                Some((self.wrapper_kind(), arena.get_boxed(self.arena_index())))
439            }
440            TAG_SOME | TAG_OK | TAG_ERR => self
441                .wrapper_inline_inner()
442                .map(|inner| (self.wrapper_kind(), inner)),
443            _ => None,
444        }
445    }
446
447    // -- Wrappers (Some/Ok/Err) -------------------------------------------
448
449    #[inline]
450    pub fn new_some(inner_index: u32) -> Self {
451        Self::encode(TAG_SOME, ARENA_REF_BIT | (inner_index as u64))
452    }
453
454    #[inline]
455    pub fn new_ok(inner_index: u32) -> Self {
456        Self::encode(TAG_OK, ARENA_REF_BIT | (inner_index as u64))
457    }
458
459    #[inline]
460    pub fn new_err(inner_index: u32) -> Self {
461        Self::encode(TAG_ERR, ARENA_REF_BIT | (inner_index as u64))
462    }
463
    /// Arena index of an arena-backed wrapper. Debug-asserts that this is an
    /// arena-backed `Some`/`Ok`/`Err` (inline wrappers have no index).
    #[inline]
    pub fn wrapper_index(self) -> u32 {
        debug_assert!(
            self.is_nan_boxed()
                && matches!(self.tag(), TAG_SOME | TAG_OK | TAG_ERR)
                && self.payload() & ARENA_REF_BIT != 0
        );
        self.arena_index()
    }

    /// Inner value of a wrapper; panics when called on a non-wrapper.
    #[inline]
    pub fn wrapper_inner<T: ArenaTypes>(self, arena: &Arena<T>) -> NanValue {
        self.wrapper_parts(arena)
            .map(|(_, inner)| inner)
            .expect("wrapper_inner() called on non-wrapper")
    }
480
481    #[inline]
482    fn wrap_value<T: ArenaTypes>(kind: u64, inner: NanValue, arena: &mut Arena<T>) -> Self {
483        if let Some(payload) = inner.plain_immediate_payload() {
484            let tag = match kind {
485                WRAP_SOME => TAG_SOME,
486                WRAP_OK => TAG_OK,
487                WRAP_ERR => TAG_ERR,
488                _ => unreachable!("invalid wrapper kind"),
489            };
490            Self::new_inline_wrapper(tag, WRAPPER_INLINE_IMMEDIATE, payload)
491        } else if inner.is_none() {
492            let tag = match kind {
493                WRAP_SOME => TAG_SOME,
494                WRAP_OK => TAG_OK,
495                WRAP_ERR => TAG_ERR,
496                _ => unreachable!("invalid wrapper kind"),
497            };
498            Self::new_inline_wrapper(tag, WRAPPER_INLINE_NONE, 0)
499        } else if let Some(value) = inner.inline_int_value() {
500            if (WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&value) {
501                let tag = match kind {
502                    WRAP_SOME => TAG_SOME,
503                    WRAP_OK => TAG_OK,
504                    WRAP_ERR => TAG_ERR,
505                    _ => unreachable!("invalid wrapper kind"),
506                };
507                return Self::new_inline_wrapper(
508                    tag,
509                    WRAPPER_INLINE_INT,
510                    Self::encode_wrapper_inline_int(value),
511                );
512            }
513            let idx = arena.push_boxed(inner);
514            match kind {
515                WRAP_SOME => Self::new_some(idx),
516                WRAP_OK => Self::new_ok(idx),
517                WRAP_ERR => Self::new_err(idx),
518                _ => unreachable!("invalid wrapper kind"),
519            }
520        } else {
521            let idx = arena.push_boxed(inner);
522            match kind {
523                WRAP_SOME => Self::new_some(idx),
524                WRAP_OK => Self::new_ok(idx),
525                WRAP_ERR => Self::new_err(idx),
526                _ => unreachable!("invalid wrapper kind"),
527            }
528        }
529    }
530
    /// Wrap `inner` in `Some`, inlining when possible (see `wrap_value`).
    #[inline]
    pub fn new_some_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
        Self::wrap_value(WRAP_SOME, inner, arena)
    }

    /// Wrap `inner` in `Ok`, inlining when possible (see `wrap_value`).
    #[inline]
    pub fn new_ok_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
        Self::wrap_value(WRAP_OK, inner, arena)
    }

    /// Wrap `inner` in `Err`, inlining when possible (see `wrap_value`).
    #[inline]
    pub fn new_err_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
        Self::wrap_value(WRAP_ERR, inner, arena)
    }
545
546    // -- Arena-backed constructors -----------------------------------------
547
548    #[inline]
549    pub fn new_string(arena_index: u32) -> Self {
550        Self::encode(TAG_STRING, STRING_ARENA_BIT | (arena_index as u64))
551    }
552
553    #[inline]
554    fn new_small_string_bytes(bytes: &[u8]) -> Self {
555        debug_assert!(bytes.len() <= STRING_INLINE_MAX_BYTES);
556        let mut payload = (bytes.len() as u64) << STRING_INLINE_LEN_SHIFT;
557        for (idx, byte) in bytes.iter().enumerate() {
558            payload |= (*byte as u64) << (idx * 8);
559        }
560        Self::encode(TAG_STRING, payload)
561    }
562
563    #[inline]
564    pub fn small_string(self) -> Option<NanString<'static>> {
565        if !self.is_nan_boxed()
566            || self.tag() != TAG_STRING
567            || self.payload() & STRING_ARENA_BIT != 0
568        {
569            return None;
570        }
571        let payload = self.payload();
572        let len = ((payload & STRING_INLINE_LEN_MASK) >> STRING_INLINE_LEN_SHIFT) as u8;
573        if len as usize > STRING_INLINE_MAX_BYTES {
574            return None;
575        }
576        let mut bytes = [0u8; STRING_INLINE_MAX_BYTES];
577        for (idx, slot) in bytes.iter_mut().take(len as usize).enumerate() {
578            *slot = ((payload >> (idx * 8)) & 0xFF) as u8;
579        }
580        Some(NanString::Inline { len, bytes })
581    }
582
583    #[inline]
584    pub fn new_string_value<T: ArenaTypes>(s: &str, arena: &mut Arena<T>) -> Self {
585        if s.len() <= STRING_INLINE_MAX_BYTES {
586            Self::new_small_string_bytes(s.as_bytes())
587        } else {
588            Self::new_string(arena.push_string(s))
589        }
590    }
591
592    #[inline]
593    pub fn new_list(arena_index: u32) -> Self {
594        Self::encode(TAG_LIST, ARENA_REF_BIT | (arena_index as u64))
595    }
596
597    #[inline]
598    pub fn new_tuple(arena_index: u32) -> Self {
599        Self::encode(TAG_TUPLE, ARENA_REF_BIT | (arena_index as u64))
600    }
601
602    #[inline]
603    pub fn new_map(arena_index: u32) -> Self {
604        Self::encode(TAG_MAP, ARENA_REF_BIT | (arena_index as u64))
605    }
606
607    #[inline]
608    pub fn new_vector(arena_index: u32) -> Self {
609        Self::encode(TAG_VECTOR, ARENA_REF_BIT | (arena_index as u64))
610    }
611
612    #[inline]
613    pub fn new_record(arena_index: u32) -> Self {
614        Self::encode(TAG_RECORD, ARENA_REF_BIT | (arena_index as u64))
615    }
616
617    #[inline]
618    pub fn new_variant(arena_index: u32) -> Self {
619        Self::encode(TAG_VARIANT, ARENA_REF_BIT | (arena_index as u64))
620    }
621
    /// Pack a symbol: kind code in bits 1:0, symbol index above it.
    #[inline]
    fn new_symbol(symbol_kind: u64, symbol_index: u32) -> Self {
        Self::encode(TAG_SYMBOL, symbol_kind | ((symbol_index as u64) << 2))
    }

    /// The `SYMBOL_*` kind code of a symbol value.
    #[inline]
    pub fn symbol_kind(self) -> u64 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        self.payload() & SYMBOL_KIND_MASK
    }

    /// The symbol index stored above the 2-bit kind code.
    #[inline]
    pub fn symbol_index(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        (self.payload() >> 2) as u32
    }

    /// A nullary (field-less) variant, represented as a symbol.
    #[inline]
    pub fn new_nullary_variant(symbol_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NULLARY_VARIANT, symbol_index)
    }
643
    /// Try to pack `ctor_id` plus a small single field into one
    /// `TAG_INLINE_VARIANT` word, avoiding an arena allocation.
    ///
    /// Succeeds only when `ctor_id` fits in 16 bits and `inner` is an inline
    /// int within `[IV_INT_MIN, IV_INT_MAX]`, a plain immediate
    /// (false/true/unit), or `None`. Returns `None` for anything else
    /// (including floats); the caller must then build an arena-backed variant.
    #[inline]
    pub fn try_new_inline_variant(ctor_id: u32, inner: NanValue) -> Option<Self> {
        if ctor_id > IV_CTOR_MASK as u32 {
            return None;
        }
        let ctor_bits = (ctor_id as u64) << IV_CTOR_SHIFT;

        if inner.is_nan_boxed() {
            match inner.tag() {
                TAG_INT if inner.payload() & INT_BIG_BIT == 0 => {
                    let i = Self::decode_inline_int_payload(inner.payload());
                    if (IV_INT_MIN..=IV_INT_MAX).contains(&i) {
                        // Kind bit stays 0: integer representation.
                        let int_bits = (i as u64) & IV_INT_MASK;
                        return Some(Self::encode(TAG_INLINE_VARIANT, ctor_bits | int_bits));
                    }
                }
                TAG_IMMEDIATE => {
                    let imm = match inner.payload() {
                        IMM_FALSE => IV_IMM_FALSE,
                        IMM_TRUE => IV_IMM_TRUE,
                        IMM_UNIT => IV_IMM_UNIT,
                        _ => return None,
                    };
                    return Some(Self::encode(
                        TAG_INLINE_VARIANT,
                        ctor_bits | IV_KIND_BIT | (imm << IV_IMM_SHIFT),
                    ));
                }
                TAG_NONE => {
                    return Some(Self::encode(
                        TAG_INLINE_VARIANT,
                        ctor_bits | IV_KIND_BIT | (IV_IMM_NONE << IV_IMM_SHIFT),
                    ));
                }
                _ => {}
            }
        }
        None
    }
683
    /// Constructor id packed into an inline variant (bits 45:30).
    #[inline]
    pub fn inline_variant_ctor_id(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT);
        ((self.payload() >> IV_CTOR_SHIFT) & IV_CTOR_MASK) as u32
    }

    /// Decode the single field stored in an inline variant.
    #[inline]
    pub fn inline_variant_inner(self) -> NanValue {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT);
        let payload = self.payload();
        if payload & IV_KIND_BIT == 0 {
            // Integer field: sign-extend the 29-bit value from bit 28.
            let raw = payload & IV_INT_MASK;
            let i = if raw & IV_INT_SIGN_BIT != 0 {
                (raw | !IV_INT_MASK) as i64
            } else {
                raw as i64
            };
            Self::new_int_inline(i)
        } else {
            // Immediate field: 2-bit code at bits 28:27.
            let imm = (payload >> IV_IMM_SHIFT) & 0b11;
            match imm {
                IV_IMM_FALSE => Self::FALSE,
                IV_IMM_TRUE => Self::TRUE,
                IV_IMM_UNIT => Self::UNIT,
                IV_IMM_NONE => Self::NONE,
                _ => unreachable!(),
            }
        }
    }
713
    /// Function symbol whose body lives in the arena at `arena_index`.
    #[inline]
    pub fn new_fn(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_FN, arena_index)
    }

    /// Builtin symbol identified by `arena_index`.
    #[inline]
    pub fn new_builtin(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_BUILTIN, arena_index)
    }

    /// Namespace symbol identified by `arena_index`.
    #[inline]
    pub fn new_namespace(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NAMESPACE, arena_index)
    }

    /// Payload with the arena-reference bit stripped, truncated to u32.
    /// Meaningful only for values whose payload carries an arena index.
    #[inline]
    pub fn arena_index(self) -> u32 {
        (self.payload() & !ARENA_REF_BIT) as u32
    }
733
734    #[inline]
735    pub fn heap_index(self) -> Option<u32> {
736        if !self.is_nan_boxed() {
737            return None;
738        }
739        match self.tag() {
740            TAG_INT => {
741                let p = self.payload();
742                if p & INT_BIG_BIT != 0 {
743                    Some((p & !INT_BIG_BIT) as u32)
744                } else {
745                    None
746                }
747            }
748            TAG_STRING | TAG_SOME | TAG_OK | TAG_ERR | TAG_LIST | TAG_TUPLE | TAG_MAP
749            | TAG_RECORD | TAG_VARIANT | TAG_VECTOR => {
750                (self.payload() & ARENA_REF_BIT != 0).then_some(self.arena_index())
751            }
752            _ => None,
753        }
754    }
755
    /// Rebuild this value with a new arena index, keeping its tag.
    ///
    /// NOTE(review): presumably used when arena entries are relocated —
    /// confirm against callers. Values without an arena reference (floats,
    /// immediates, symbols, inline variants) are returned unchanged.
    #[inline]
    pub fn with_heap_index(self, index: u32) -> Self {
        if !self.is_nan_boxed() {
            return self;
        }
        match self.tag() {
            TAG_INT => {
                // Only arena-backed ints may be re-pointed; inline ints have
                // no index to replace.
                debug_assert!(self.payload() & INT_BIG_BIT != 0);
                Self::new_int_arena(index)
            }
            TAG_SOME => Self::new_some(index),
            TAG_OK => Self::new_ok(index),
            TAG_ERR => Self::new_err(index),
            TAG_STRING => Self::new_string(index),
            TAG_LIST => Self::new_list(index),
            TAG_TUPLE => Self::new_tuple(index),
            TAG_MAP => Self::new_map(index),
            TAG_VECTOR => Self::new_vector(index),
            TAG_RECORD => Self::new_record(index),
            TAG_VARIANT => Self::new_variant(index),
            _ => self,
        }
    }
779
    // -- Type checks -------------------------------------------------------

    /// True when the value is a plain `f64` (i.e. not NaN-boxed).
    #[inline]
    pub fn is_float(self) -> bool {
        !self.is_nan_boxed()
    }

    /// True for both inline and arena-backed ints.
    #[inline]
    pub fn is_int(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_INT
    }

    /// True only for the `TRUE`/`FALSE` immediates.
    #[inline]
    pub fn is_bool(self) -> bool {
        self.is_nan_boxed()
            && self.tag() == TAG_IMMEDIATE
            && (self.payload() == IMM_TRUE || self.payload() == IMM_FALSE)
    }

    #[inline]
    pub fn is_unit(self) -> bool {
        self.0 == Self::UNIT.0
    }

    #[inline]
    pub fn is_none(self) -> bool {
        self.0 == Self::NONE.0
    }

    /// True for any `Some`, inline or arena-backed.
    #[inline]
    pub fn is_some(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SOME
    }

    /// True for any `Ok`, inline or arena-backed.
    #[inline]
    pub fn is_ok(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_OK
    }

    /// True for any `Err`, inline or arena-backed.
    #[inline]
    pub fn is_err(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_ERR
    }

    /// True for both inline small strings and arena-backed strings.
    #[inline]
    pub fn is_string(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_STRING
    }
828
829    pub fn string_eq<T: ArenaTypes>(self, other: NanValue, arena: &Arena<T>) -> bool {
830        if self.bits() == other.bits() {
831            return true;
832        }
833        if !self.is_string() || !other.is_string() {
834            return false;
835        }
836        arena.get_string_value(self).as_str() == arena.get_string_value(other).as_str()
837    }
838
    /// True for any list, including the empty-list immediate.
    #[inline]
    pub fn is_list(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_LIST
    }

    #[inline]
    pub fn is_record(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_RECORD
    }

    /// True for function symbols only (not builtins or namespaces).
    #[inline]
    pub fn is_fn(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_FN
    }

    /// True for all three variant representations: arena-backed, inline,
    /// and nullary (symbol-encoded).
    #[inline]
    pub fn is_variant(self) -> bool {
        self.is_nan_boxed()
            && (self.tag() == TAG_VARIANT
                || self.tag() == TAG_INLINE_VARIANT
                || (self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NULLARY_VARIANT))
    }

    #[inline]
    pub fn is_inline_variant(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT
    }

    /// True for any map, including the empty-map immediate.
    #[inline]
    pub fn is_map(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_MAP
    }

    /// True for any vector, including the empty-vector immediate.
    #[inline]
    pub fn is_vector(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_VECTOR
    }

    /// True only for the allocation-free empty-vector encoding.
    #[inline]
    pub fn is_empty_vector_immediate(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_VECTOR && self.payload() & ARENA_REF_BIT == 0
    }

    #[inline]
    pub fn is_tuple(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_TUPLE
    }

    #[inline]
    pub fn is_builtin(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_BUILTIN
    }

    #[inline]
    pub fn is_namespace(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NAMESPACE
    }

    /// True only for the allocation-free empty-list encoding.
    #[inline]
    pub fn is_empty_list_immediate(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_LIST && self.payload() & ARENA_REF_BIT == 0
    }

    /// True only for the allocation-free empty-map encoding.
    #[inline]
    pub fn is_empty_map_immediate(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_MAP && self.payload() & ARENA_REF_BIT == 0
    }
906
907    pub fn type_name(self) -> &'static str {
908        if self.is_float() {
909            return "Float";
910        }
911        match self.tag() {
912            TAG_INT => "Int",
913            TAG_IMMEDIATE => match self.payload() {
914                IMM_FALSE | IMM_TRUE => "Bool",
915                IMM_UNIT => "Unit",
916                _ => "Unknown",
917            },
918            TAG_SOME => "Option.Some",
919            TAG_NONE => "Option.None",
920            TAG_OK => "Result.Ok",
921            TAG_ERR => "Result.Err",
922            TAG_STRING => "String",
923            TAG_LIST => "List",
924            TAG_TUPLE => "Tuple",
925            TAG_MAP => "Map",
926            TAG_VECTOR => "Vector",
927            TAG_RECORD => "Record",
928            TAG_VARIANT | TAG_INLINE_VARIANT => "Variant",
929            TAG_SYMBOL => match self.symbol_kind() {
930                SYMBOL_FN => "Fn",
931                SYMBOL_BUILTIN => "Builtin",
932                SYMBOL_NAMESPACE => "Namespace",
933                SYMBOL_NULLARY_VARIANT => "Variant",
934                _ => "Unknown",
935            },
936            _ => "Unknown",
937        }
938    }
939
940    #[inline]
941    pub fn variant_ctor_id<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<u32> {
942        if !self.is_nan_boxed() {
943            return None;
944        }
945        match self.tag() {
946            TAG_INLINE_VARIANT => Some(self.inline_variant_ctor_id()),
947            TAG_VARIANT => {
948                let (type_id, variant_id, _) = arena.get_variant(self.arena_index());
949                arena.find_ctor_id(type_id, variant_id)
950            }
951            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
952                Some(arena.get_nullary_variant_ctor(self.symbol_index()))
953            }
954            _ => None,
955        }
956    }
957
958    #[inline]
959    pub fn variant_parts<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<(u32, u16, &[NanValue])> {
960        if !self.is_nan_boxed() {
961            return None;
962        }
963        match self.tag() {
964            TAG_VARIANT => {
965                let (type_id, variant_id, fields) = arena.get_variant(self.arena_index());
966                Some((type_id, variant_id, fields))
967            }
968            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
969                let (type_id, variant_id) =
970                    arena.get_ctor_parts(arena.get_nullary_variant_ctor(self.symbol_index()));
971                Some((type_id, variant_id, &[]))
972            }
973            _ => None,
974        }
975    }
976
977    #[inline]
978    pub fn variant_single_field<T: ArenaTypes>(self, arena: &Arena<T>) -> NanValue {
979        if self.tag() == TAG_INLINE_VARIANT {
980            self.inline_variant_inner()
981        } else {
982            let (_, _, fields) = arena.get_variant(self.arena_index());
983            debug_assert_eq!(fields.len(), 1);
984            fields[0]
985        }
986    }
987
988    #[inline]
989    pub fn inline_variant_info<T: ArenaTypes>(
990        self,
991        arena: &Arena<T>,
992    ) -> Option<(u32, u16, NanValue)> {
993        if !self.is_nan_boxed() || self.tag() != TAG_INLINE_VARIANT {
994            return None;
995        }
996        let ctor_id = self.inline_variant_ctor_id();
997        let (type_id, variant_id) = arena.get_ctor_parts(ctor_id);
998        Some((type_id, variant_id, self.inline_variant_inner()))
999    }
1000
1001    #[inline]
1002    pub fn bits(self) -> u64 {
1003        self.0
1004    }
1005
1006    #[inline]
1007    pub fn from_bits(bits: u64) -> Self {
1008        NanValue(bits)
1009    }
1010
1011    pub fn map_key_hash<T: ArenaTypes>(self, arena: &Arena<T>) -> u64 {
1012        if self.is_string() {
1013            use core::hash::{Hash, Hasher};
1014            let mut hasher = DefaultHasher::new();
1015            3u8.hash(&mut hasher);
1016            arena.get_string_value(self).hash(&mut hasher);
1017            hasher.finish()
1018        } else {
1019            self.bits()
1020        }
1021    }
1022}
1023
1024// -- Debug -----------------------------------------------------------------
1025
// Debug formatting for `NanValue`.
//
// Floats print directly; everything else is decoded from its NaN-box tag.
// No arena is available here, so arena-backed values print only their index.
impl core::fmt::Debug for NanValue {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Any bit pattern without the quiet-NaN marker is a plain float.
        if self.is_float() {
            return write!(f, "Float({})", self.as_float());
        }
        match self.tag() {
            TAG_INT => {
                // INT_BIG_BIT distinguishes arena-boxed ints from inline 45-bit ints.
                if self.payload() & INT_BIG_BIT != 0 {
                    // NOTE(review): masks with !INT_BIG_BIT instead of calling
                    // self.arena_index() like the other arms — confirm both
                    // decode the same index.
                    write!(f, "Int(arena:{})", (self.payload() & !INT_BIG_BIT) as u32)
                } else {
                    write!(
                        f,
                        "Int({})",
                        Self::decode_inline_int_payload(self.payload())
                    )
                }
            }
            // Immediates: the payload itself selects the singleton.
            TAG_IMMEDIATE => match self.payload() {
                IMM_FALSE => write!(f, "False"),
                IMM_TRUE => write!(f, "True"),
                IMM_UNIT => write!(f, "Unit"),
                _ => write!(f, "Immediate({})", self.payload()),
            },
            TAG_NONE => write!(f, "None"),
            // Wrapper values (Some/Ok/Err): either an arena reference or an
            // inline inner value, per the ARENA_REF_BIT.
            TAG_SOME | TAG_OK | TAG_ERR => {
                let kind = match self.tag() {
                    TAG_SOME => "Some",
                    TAG_OK => "Ok",
                    TAG_ERR => "Err",
                    _ => "?",
                };
                if self.payload() & ARENA_REF_BIT != 0 {
                    write!(f, "{}(arena:{})", kind, self.arena_index())
                } else if let Some(inner) = self.wrapper_inline_inner() {
                    write!(f, "{}({:?})", kind, inner)
                } else {
                    // Inline bit clear but no decodable inner value.
                    write!(f, "{}(?)", kind)
                }
            }
            // Symbols print their kind plus symbol-table index.
            TAG_SYMBOL => match self.symbol_kind() {
                SYMBOL_FN => write!(f, "Fn(symbol:{})", self.symbol_index()),
                SYMBOL_BUILTIN => write!(f, "Builtin(symbol:{})", self.symbol_index()),
                SYMBOL_NAMESPACE => write!(f, "Namespace(symbol:{})", self.symbol_index()),
                SYMBOL_NULLARY_VARIANT => {
                    write!(f, "NullaryVariant(symbol:{})", self.symbol_index())
                }
                _ => write!(f, "Symbol({})", self.payload()),
            },
            // Small strings decode inline; otherwise only the arena index is known.
            TAG_STRING => {
                if let Some(s) = self.small_string() {
                    write!(f, "String({:?})", s.as_str())
                } else {
                    write!(f, "String(arena:{})", self.arena_index())
                }
            }
            TAG_INLINE_VARIANT => {
                let ctor = self.inline_variant_ctor_id();
                let inner = self.inline_variant_inner();
                write!(f, "InlineVariant(ctor:{}, {:?})", ctor, inner)
            }
            // Empty-collection singletons; guards keep arena-backed ones on
            // the fallback arm below.
            TAG_LIST if self.is_empty_list_immediate() => write!(f, "EmptyList"),
            TAG_MAP if self.is_empty_map_immediate() => write!(f, "EmptyMap"),
            TAG_VECTOR if self.is_empty_vector_immediate() => write!(f, "EmptyVector"),
            // Everything else is arena-backed: print type name + index.
            _ => write!(f, "{}(arena:{})", self.type_name(), self.arena_index()),
        }
    }
}
1093
1094// ---------------------------------------------------------------------------
1095// A simple DefaultHasher for no_std — mirrors std::collections::hash_map::DefaultHasher
1096// ---------------------------------------------------------------------------
1097
1098/// SipHasher-like hasher for use in no_std contexts.
1099/// When std is available, delegates to `std::collections::hash_map::DefaultHasher`.
/// Hasher used for map-key hashing.
///
/// With the `std` feature this wraps `std::collections::hash_map::DefaultHasher`;
/// without it, it falls back to a minimal FNV-1a (64-bit) implementation.
struct DefaultHasher {
    #[cfg(feature = "std")]
    inner: std::collections::hash_map::DefaultHasher,
    // Running FNV-1a state for the no_std fallback.
    #[cfg(not(feature = "std"))]
    state: u64,
}

impl DefaultHasher {
    #[cfg(feature = "std")]
    fn new() -> Self {
        Self {
            inner: std::collections::hash_map::DefaultHasher::new(),
        }
    }

    #[cfg(not(feature = "std"))]
    fn new() -> Self {
        // FNV-1a 64-bit offset basis.
        Self {
            state: 0xcbf29ce484222325,
        }
    }
}

impl Hasher for DefaultHasher {
    #[cfg(feature = "std")]
    fn finish(&self) -> u64 {
        self.inner.finish()
    }
    #[cfg(feature = "std")]
    fn write(&mut self, bytes: &[u8]) {
        self.inner.write(bytes)
    }

    #[cfg(not(feature = "std"))]
    fn finish(&self) -> u64 {
        self.state
    }
    #[cfg(not(feature = "std"))]
    fn write(&mut self, bytes: &[u8]) {
        // FNV-1a step: xor in each byte, then multiply by the 64-bit FNV prime.
        self.state = bytes
            .iter()
            .fold(self.state, |acc, &b| {
                (acc ^ u64::from(b)).wrapping_mul(0x100000001b3)
            });
    }
}
1146
1147// ---------------------------------------------------------------------------
1148// Arena
1149// ---------------------------------------------------------------------------
1150
/// Backing store for all non-inline values, split into four spaces
/// (young / yard / handoff / stable) with per-space entry vectors.
#[derive(Debug, Clone)]
pub struct Arena<T: ArenaTypes> {
    // One entry vector per heap space (see `HeapSpace`).
    young_entries: Vec<ArenaEntry<T>>,
    yard_entries: Vec<ArenaEntry<T>>,
    handoff_entries: Vec<ArenaEntry<T>>,
    stable_entries: Vec<ArenaEntry<T>>,
    // Per-space u32 scratch buffers — presumably index remap/forwarding
    // tables used during collection; confirm in the `memory` module.
    scratch_young: Vec<u32>,
    scratch_yard: Vec<u32>,
    scratch_handoff: Vec<u32>,
    scratch_stable: Vec<u32>,
    // High-water mark of per-space usage.
    peak_usage: ArenaUsage,
    // Space that new allocations currently target.
    alloc_space: AllocSpace,
    // Type metadata tables, indexed by type id.
    pub type_names: Vec<String>,
    pub type_field_names: Vec<Vec<String>>,
    pub type_variant_names: Vec<Vec<String>>,
    pub type_variant_ctor_ids: Vec<Vec<u32>>,
    // ctor id -> (type_id, variant_id); backs `get_ctor_parts`.
    pub ctor_to_type_variant: Vec<(u32, u16)>,
    // Backing store for TAG_SYMBOL values, indexed by symbol index.
    pub symbol_entries: Vec<ArenaSymbol<T>>,
}
1170
/// One heap-allocated value. A `NanValue` whose payload has the arena-ref
/// bit set points at one of these by index.
#[derive(Debug, Clone)]
pub enum ArenaEntry<T: ArenaTypes> {
    /// Integer too wide for the 45-bit inline encoding.
    Int(i64),
    /// String too long for the inline small-string encoding.
    String(Rc<str>),
    /// List node; see `ArenaList` for the representations.
    List(ArenaList),
    Tuple(Vec<NanValue>),
    /// Map payload; concrete type supplied by the `ArenaTypes` binding.
    Map(T::Map),
    Vector(Vec<NanValue>),
    Record {
        type_id: u32,
        fields: Vec<NanValue>,
    },
    /// Variant with one or more fields (nullary variants live as symbols,
    /// single small fields may use the inline encoding instead).
    Variant {
        type_id: u32,
        variant_id: u16,
        fields: Vec<NanValue>,
    },
    Fn(Rc<T::Fn>),
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Single-value indirection — presumably for wrapper values (Some/Ok/Err)
    /// whose inner doesn't fit inline; confirm at allocation sites.
    Boxed(NanValue),
}
1196
/// Entry in the symbol table (`Arena::symbol_entries`), referenced by
/// TAG_SYMBOL values via their symbol index.
#[derive(Debug, Clone)]
pub enum ArenaSymbol<T: ArenaTypes> {
    Fn(Rc<T::Fn>),
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Variant constructor with no fields; resolved to (type_id, variant_id)
    /// through `ctor_to_type_variant`.
    NullaryVariant {
        ctor_id: u32,
    },
}
1209
/// List representations. Several shapes avoid copying on common operations;
/// `len`/`start` fields cache sizes so length queries stay O(1).
#[derive(Debug, Clone)]
pub enum ArenaList {
    /// Contiguous elements in a shared vector, viewed from `start` onward.
    Flat {
        items: Rc<Vec<NanValue>>,
        start: usize,
    },
    /// Cons cell: `head` prepended to the list value `tail`.
    Prepend {
        head: NanValue,
        tail: NanValue,
        len: usize,
    },
    /// Lazy concatenation of two list values.
    Concat {
        left: NanValue,
        right: NanValue,
        len: usize,
    },
    /// `current` followed by the lists in `rest[start..]` — presumably the
    /// append-heavy representation built in the `lists` module; confirm there.
    Segments {
        current: NanValue,
        rest: Rc<Vec<NanValue>>,
        start: usize,
        len: usize,
    },
}
1233
// Chunk-size threshold for list appends — presumably bounds how large a
// segment grows before a new one is started; see the `lists` module.
const LIST_APPEND_CHUNK_LIMIT: usize = 128;
1235
/// The four arena spaces. Discriminants are explicit because the space is
/// packed into the top 2 bits of a u32 heap reference
/// (see `HEAP_SPACE_SHIFT` / `HEAP_SPACE_MASK_U32`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum HeapSpace {
    Young = 0,
    Yard = 1,
    Handoff = 2,
    Stable = 3,
}
1243
/// Spaces a new allocation may target. `Stable` is deliberately absent —
/// presumably entries only reach the stable space by promotion; confirm in
/// the `memory` module.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocSpace {
    Young,
    Yard,
    Handoff,
}
1250
/// Entry counts per arena space, used e.g. for peak-usage tracking.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct ArenaUsage {
    pub young: usize,
    pub yard: usize,
    pub handoff: usize,
    pub stable: usize,
}

impl ArenaUsage {
    /// Total number of entries across all four spaces.
    pub fn total(self) -> usize {
        [self.young, self.yard, self.handoff, self.stable]
            .iter()
            .sum()
    }
}
1264
// Heap references pack (space, index) into one u32: bits 31:30 hold the
// `HeapSpace` discriminant, bits 29:0 the index within that space.
pub(crate) const HEAP_SPACE_SHIFT: u32 = 30;
// 2-bit space-field mask (bits 31:30).
pub(crate) const HEAP_SPACE_MASK_U32: u32 = 0b11 << HEAP_SPACE_SHIFT;
// 30-bit index-field mask (bits 29:0).
pub(crate) const HEAP_INDEX_MASK_U32: u32 = (1 << HEAP_SPACE_SHIFT) - 1;
1268
1269mod arena;
1270mod compare;
1271mod lists;
1272mod memory;
1273
1274// ---------------------------------------------------------------------------
1275// Feature-gated MapLike impl for aver_rt::AverMap
1276// ---------------------------------------------------------------------------
1277
#[cfg(feature = "runtime")]
/// `PersistentMap` type alias used by the VM arena.
///
/// Keyed by `u64` hashes — presumably produced by `NanValue::map_key_hash`
/// (confirm at call sites); each slot stores the original `(key, value)`
/// pair so the un-hashed key remains recoverable.
pub type PersistentMap = aver_rt::AverMap<u64, (NanValue, NanValue)>;
1281
#[cfg(feature = "runtime")]
// Adapter letting the arena use `aver_rt::AverMap` as its map representation.
// Every method delegates through a fully-qualified path to `AverMap`'s own
// method of the same name (plain `self.get(...)`-style calls could resolve
// back to these trait methods).
impl MapLike for aver_rt::AverMap<u64, (NanValue, NanValue)> {
    fn new() -> Self {
        aver_rt::AverMap::new()
    }

    fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)> {
        aver_rt::AverMap::get(self, key)
    }

    // Persistent insert: returns a new map, leaving `self` untouched.
    fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self {
        aver_rt::AverMap::insert(self, key, value)
    }

    fn len(&self) -> usize {
        aver_rt::AverMap::len(self)
    }

    fn is_empty(&self) -> bool {
        aver_rt::AverMap::is_empty(self)
    }

    fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))> {
        aver_rt::AverMap::iter(self)
    }

    fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)> {
        aver_rt::AverMap::values(self)
    }
}