// aver_memory — lib.rs
1//! NaN-boxed compact Value representation (8 bytes per value).
2//!
3//! Layout: every value is a `u64` interpreted as an IEEE 754 `f64`.
4//!
5//! - **Float**: any f64 that is NOT a quiet NaN with our marker -> stored directly.
6//! - **Everything else**: encoded as a quiet NaN with tag + payload in the mantissa.
7//!
8//! IEEE 754 quiet NaN: exponent=0x7FF (all 1s), quiet bit=1, plus our marker bit.
9//! We use `0x7FFC` as 14-bit prefix (bits 63-50), leaving bits 49-0 free.
10//!
11//! ```text
12//! 63      50 49  46 45                    0
13//! +--------+------+------------------------+
14//! | 0x7FFC | tag  |       payload          |
15//! | 14 bits| 4 bit|       46 bits          |
16//! +--------+------+------------------------+
17//! ```
18//!
19//! Tag map:
20//!   0  = Immediate       payload 0-2: false/true/unit
21//!   1  = Symbol          payload bits 0-1: fn/builtin/namespace/nullary-variant; rest=symbol index
22//!   2  = Int             payload bit45: 0=inline(45-bit signed), 1=arena index
23//!   3  = String          payload bit45: 0=inline small string (len + 5 bytes), 1=arena index
24//!   4  = Some            payload bit45: 0=inline inner, 1=arena index
25//!   5  = None            singleton
26//!   6  = Ok              payload bit45: 0=inline inner, 1=arena index
27//!   7  = Err             payload bit45: 0=inline inner, 1=arena index
28//!   8  = List            payload bit45: 0=empty list, 1=arena index
29//!   9  = Vector          payload bit45: 0=empty vector, 1=arena index
30//!   10 = Map             payload bit45: 0=empty map, 1=arena index
31//!   11 = Record          payload bit45: 1=arena index
32//!   12 = Variant         payload bit45: 1=arena index
33//!   13 = Tuple           payload bit45: 1=arena index
34//!   14 = InlineVariant  [45:30]=ctor_id, [29]=kind(0=int,1=imm), [28:0]=value
35//!   15 = (reserved)
36
37#![cfg_attr(not(feature = "std"), no_std)]
38
39extern crate alloc;
40
41use alloc::format;
42use alloc::string::String;
43use alloc::sync::Arc as Rc;
44use alloc::vec::Vec;
45use core::cmp::Ordering;
46use core::hash::{Hash, Hasher};
47use core::ops::Deref;
48
49// ---------------------------------------------------------------------------
50// ArenaTypes trait — parameterises the arena over consumer-specific types
51// ---------------------------------------------------------------------------
52
53/// Trait that defines the function and map types used by the arena.
54///
55/// The arena stores function values and persistent maps, but their concrete
56/// types depend on the consumer (VM, WASM runtime, codegen, etc.).
pub trait ArenaTypes: Clone + core::fmt::Debug + 'static {
    /// The function value type (e.g. `FunctionValue` in the VM); must expose
    /// a display name via [`FnValueName`].
    type Fn: Clone + core::fmt::Debug + FnValueName;
    /// The persistent map type (e.g. `AverMap<u64, (NanValue, NanValue)>`);
    /// must support the hash-keyed operations in [`MapLike`].
    type Map: Clone + core::fmt::Debug + MapLike;
}
63
/// Trait for extracting a display name from a function value.
pub trait FnValueName {
    /// Returns the function's display name.
    fn name(&self) -> &str;
}
68
/// Trait abstracting the persistent-map operations needed by the arena.
///
/// Implementors provide a hash-keyed map from `u64` to `(NanValue, NanValue)`.
/// NOTE(review): the stored pair is presumably (original key, value) indexed
/// by the key's hash — confirm against implementors.
pub trait MapLike: Sized {
    /// Creates an empty map.
    fn new() -> Self;
    /// Looks up the entry stored under `key`, if any.
    fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)>;
    /// Returns a new map with `key` bound to `value` (persistent insert).
    fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self;
    /// Insert with owned self — avoids clone when sole owner.
    fn insert_owned(self, key: u64, value: (NanValue, NanValue)) -> Self {
        self.insert(key, value) // default: fall back to &self version
    }
    /// Number of entries in the map.
    fn len(&self) -> usize;
    /// `true` when the map holds no entries.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Iterates over `(hash, entry)` pairs.
    fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))>;
    /// Iterates over stored entries only.
    fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)>;
}
87
88// ---------------------------------------------------------------------------
89// Bit layout constants
90// ---------------------------------------------------------------------------
91
// Quiet-NaN marker: exponent all ones (0x7FF), quiet bit, plus our own
// marker bit — the top 14 bits are 0x7FFC with the sign bit clear.
const QNAN: u64 = 0x7FFC_0000_0000_0000;
// Includes the sign bit, so a word is nan-boxed iff
// `(bits & QNAN_MASK) == QNAN`.
const QNAN_MASK: u64 = 0xFFFC_0000_0000_0000;
// 4-bit tag at bits 49-46, directly below the marker prefix.
const TAG_SHIFT: u32 = 46;
const TAG_MASK: u64 = 0xF;
// Tag-specific payload occupies bits 45-0.
const PAYLOAD_MASK: u64 = (1u64 << 46) - 1;

// Type tags (see the module docs for the full map).
pub const TAG_IMMEDIATE: u64 = 0;
pub const TAG_SYMBOL: u64 = 1;
pub const TAG_INT: u64 = 2;
pub const TAG_STRING: u64 = 3;
pub const TAG_SOME: u64 = 4;
pub const TAG_NONE: u64 = 5;
pub const TAG_OK: u64 = 6;
pub const TAG_ERR: u64 = 7;
pub const TAG_LIST: u64 = 8;
pub const TAG_VECTOR: u64 = 9;
pub const TAG_MAP: u64 = 10;
pub const TAG_RECORD: u64 = 11;
pub const TAG_VARIANT: u64 = 12;
pub const TAG_TUPLE: u64 = 13;
pub const TAG_INLINE_VARIANT: u64 = 14;

// Symbol kinds, stored in payload bits 1-0; the symbol index sits above.
pub const SYMBOL_FN: u64 = 0;
pub const SYMBOL_BUILTIN: u64 = 1;
pub const SYMBOL_NAMESPACE: u64 = 2;
pub const SYMBOL_NULLARY_VARIANT: u64 = 3;
const SYMBOL_KIND_MASK: u64 = 0b11;

// Immediate payloads (tag 0).
pub const IMM_FALSE: u64 = 0;
pub const IMM_TRUE: u64 = 1;
pub const IMM_UNIT: u64 = 2;

// Wrapper kinds for Some/Ok/Err. When the arena-ref bit is clear, bits
// 44-43 say how the inner value is inlined (immediate / int / None) and
// bits 42-0 carry it.
pub const WRAP_SOME: u64 = 0;
pub const WRAP_OK: u64 = 1;
pub const WRAP_ERR: u64 = 2;
const WRAPPER_INLINE_KIND_SHIFT: u32 = 43;
const WRAPPER_INLINE_KIND_MASK: u64 = 0b11 << WRAPPER_INLINE_KIND_SHIFT;
const WRAPPER_INLINE_PAYLOAD_MASK: u64 = (1u64 << WRAPPER_INLINE_KIND_SHIFT) - 1;
const WRAPPER_INLINE_IMMEDIATE: u64 = 0;
const WRAPPER_INLINE_INT: u64 = 1;
const WRAPPER_INLINE_NONE: u64 = 2;
const WRAPPER_INT_INLINE_MASK: u64 = WRAPPER_INLINE_PAYLOAD_MASK;
// 43-bit two's-complement range for ints inlined inside a wrapper.
const WRAPPER_INT_INLINE_MAX: i64 = (1i64 << 42) - 1;
const WRAPPER_INT_INLINE_MIN: i64 = -(1i64 << 42);

// Bit 45: payload is an arena index rather than inline data.
pub const ARENA_REF_BIT: u64 = 1u64 << 45;
const INT_BIG_BIT: u64 = ARENA_REF_BIT;
const INT_INLINE_MASK: u64 = (1u64 << 45) - 1;
// 45-bit two's-complement range for inline ints.
pub const INT_INLINE_MAX: i64 = (1i64 << 44) - 1;
pub const INT_INLINE_MIN: i64 = -(1i64 << 44);

// Inline small strings: 3-bit length in bits 42-40, up to 5 raw bytes in
// bits 39-0 (byte 0 in the lowest byte).
const STRING_ARENA_BIT: u64 = ARENA_REF_BIT;
const STRING_INLINE_LEN_SHIFT: u32 = 40;
const STRING_INLINE_LEN_MASK: u64 = 0b111 << STRING_INLINE_LEN_SHIFT;
const STRING_INLINE_MAX_BYTES: usize = 5;

// -- Inline variant layout --------------------------------------------------
// [45:30] ctor id, [29] kind (0 = 29-bit signed int, 1 = immediate),
// [28:0] the value itself (immediate selector in bits 28-27).
const IV_CTOR_SHIFT: u32 = 30;
const IV_CTOR_MASK: u64 = 0xFFFF;
const IV_KIND_BIT: u64 = 1 << 29;
const IV_INT_MASK: u64 = (1u64 << 29) - 1;
const IV_INT_SIGN_BIT: u64 = 1u64 << 28;
const IV_INT_MAX: i64 = (1i64 << 28) - 1;
const IV_INT_MIN: i64 = -(1i64 << 28);
const IV_IMM_SHIFT: u32 = 27;
const IV_IMM_FALSE: u64 = 0;
const IV_IMM_TRUE: u64 = 1;
const IV_IMM_UNIT: u64 = 2;
const IV_IMM_NONE: u64 = 3;
161
162// ---------------------------------------------------------------------------
163// NanValue - the 8-byte compact value
164// ---------------------------------------------------------------------------
165
/// The 8-byte compact value: either a raw `f64`, or a quiet NaN whose
/// mantissa carries a 4-bit tag plus 46-bit payload (see module docs).
#[derive(Clone, Copy)]
pub struct NanValue(u64);
168
/// A decoded string view: either borrowed from backing storage, or a small
/// string (at most [`STRING_INLINE_MAX_BYTES`] bytes) unpacked from the
/// NaN payload into an owned buffer.
#[derive(Clone, Copy, Debug)]
pub enum NanString<'a> {
    /// String data borrowed from backing storage.
    Borrowed(&'a str),
    /// Small string copied out of the inline payload.
    Inline {
        // Number of valid bytes in `bytes` (0..=STRING_INLINE_MAX_BYTES).
        len: u8,
        bytes: [u8; STRING_INLINE_MAX_BYTES],
    },
}
177
178impl<'a> NanString<'a> {
179    #[inline]
180    pub fn as_str(&self) -> &str {
181        match self {
182            NanString::Borrowed(s) => s,
183            NanString::Inline { len, bytes } => core::str::from_utf8(&bytes[..*len as usize])
184                .expect("NanString inline payload must be valid UTF-8"),
185        }
186    }
187}
188
// `NanString` dereferences to `str`, so all slice methods are available.
impl Deref for NanString<'_> {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

// Equality, ordering, and hashing all delegate to `as_str()`, so a
// `Borrowed` and an `Inline` holding the same text compare equal and hash
// identically (keeping Eq/Ord/Hash mutually consistent).
impl PartialEq for NanString<'_> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_str() == other.as_str()
    }
}

impl Eq for NanString<'_> {}

// Symmetric comparisons against plain `&str` for ergonomic call sites.
impl PartialEq<&str> for NanString<'_> {
    #[inline]
    fn eq(&self, other: &&str) -> bool {
        self.as_str() == *other
    }
}

impl PartialEq<NanString<'_>> for &str {
    #[inline]
    fn eq(&self, other: &NanString<'_>) -> bool {
        *self == other.as_str()
    }
}

impl PartialOrd for NanString<'_> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

// Lexicographic byte order, same as `str`'s own `Ord`.
impl Ord for NanString<'_> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_str().cmp(other.as_str())
    }
}

impl Hash for NanString<'_> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}

impl core::fmt::Display for NanString<'_> {
    #[inline]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.write_str(self.as_str())
    }
}
248
249// -- Encoding / decoding ---------------------------------------------------
250
251impl NanValue {
252    #[inline]
253    fn decode_inline_int_payload(payload: u64) -> i64 {
254        debug_assert!(payload & INT_BIG_BIT == 0);
255        let raw = payload & INT_INLINE_MASK;
256        if raw & (1u64 << 44) != 0 {
257            (raw | !INT_INLINE_MASK) as i64
258        } else {
259            raw as i64
260        }
261    }
262
    /// Packs `tag` (4 bits) and `payload` (46 bits) into a NaN-boxed word.
    #[inline]
    pub fn encode(tag: u64, payload: u64) -> Self {
        debug_assert!(tag <= TAG_MASK);
        debug_assert!(payload <= PAYLOAD_MASK);
        NanValue(QNAN | (tag << TAG_SHIFT) | payload)
    }

    /// `true` when the word carries a tag/payload rather than a plain float.
    #[inline]
    pub fn is_nan_boxed(self) -> bool {
        (self.0 & QNAN_MASK) == QNAN
    }

    /// The 4-bit type tag. Only meaningful when [`Self::is_nan_boxed`].
    #[inline]
    pub fn tag(self) -> u64 {
        (self.0 >> TAG_SHIFT) & TAG_MASK
    }

    /// The low 46 payload bits. Only meaningful when nan-boxed.
    #[inline]
    pub fn payload(self) -> u64 {
        self.0 & PAYLOAD_MASK
    }
284
285    // -- Constructors ------------------------------------------------------
286
287    #[inline]
288    pub fn new_float(f: f64) -> Self {
289        let bits = f.to_bits();
290        if (bits & QNAN_MASK) == QNAN {
291            NanValue(bits ^ 1)
292        } else {
293            NanValue(bits)
294        }
295    }
296
    /// Reinterprets the raw word as an `f64`. Nan-boxed words decode to a
    /// NaN (the 0x7FFC prefix keeps the exponent all-ones with a non-zero
    /// mantissa); check [`Self::is_float`] first.
    #[inline]
    pub fn as_float(self) -> f64 {
        f64::from_bits(self.0)
    }

    /// Encodes an integer inline (45-bit two's complement in the payload).
    /// Range is debug-asserted; use [`Self::new_int`] for arbitrary values.
    #[inline]
    pub fn new_int_inline(i: i64) -> Self {
        debug_assert!((INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i));
        let payload = (i as u64) & INT_INLINE_MASK;
        Self::encode(TAG_INT, payload)
    }

    /// Int stored out-of-line: big bit set, payload is an arena index.
    #[inline]
    pub fn new_int_arena(arena_index: u32) -> Self {
        Self::encode(TAG_INT, INT_BIG_BIT | (arena_index as u64))
    }

    /// Encodes `i` inline when it fits 45 bits, otherwise spills it to the
    /// arena.
    #[inline]
    pub fn new_int<T: ArenaTypes>(i: i64, arena: &mut Arena<T>) -> Self {
        if (INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i) {
            Self::new_int_inline(i)
        } else {
            let idx = arena.push_i64(i);
            Self::new_int_arena(idx)
        }
    }

    /// Decodes an Int, fetching from the arena when the big bit is set.
    #[inline]
    pub fn as_int<T: ArenaTypes>(self, arena: &Arena<T>) -> i64 {
        let p = self.payload();
        if p & INT_BIG_BIT != 0 {
            let idx = (p & !INT_BIG_BIT) as u32;
            arena.get_i64(idx)
        } else {
            Self::decode_inline_int_payload(p)
        }
    }

    /// Raw payload of an inline (non-arena) Int, or `None` otherwise.
    #[inline]
    pub fn inline_int_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_INT && self.payload() & INT_BIG_BIT == 0)
            .then_some(self.payload())
    }

    /// Decoded value of an inline Int, or `None` otherwise.
    #[inline]
    pub fn inline_int_value(self) -> Option<i64> {
        self.inline_int_payload()
            .map(Self::decode_inline_int_payload)
    }
346
347    // -- Immediates --------------------------------------------------------
348
    // Canonical singletons: tag plus fixed payload, built at compile time.
    pub const FALSE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_FALSE);
    pub const TRUE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_TRUE);
    pub const UNIT: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_UNIT);
    pub const NONE: NanValue = NanValue(QNAN | (TAG_NONE << TAG_SHIFT));
    // Empty collections are immediates too: the arena-ref bit is clear.
    pub const EMPTY_LIST: NanValue = NanValue(QNAN | (TAG_LIST << TAG_SHIFT));
    pub const EMPTY_MAP: NanValue = NanValue(QNAN | (TAG_MAP << TAG_SHIFT));
    pub const EMPTY_VECTOR: NanValue = NanValue(QNAN | (TAG_VECTOR << TAG_SHIFT));
    pub const EMPTY_STRING: NanValue = NanValue(QNAN | (TAG_STRING << TAG_SHIFT));

    /// Boolean constructor, mapping onto the two singleton constants.
    #[inline]
    pub fn new_bool(b: bool) -> Self {
        if b { Self::TRUE } else { Self::FALSE }
    }

    /// `true` only for the exact `TRUE` bit pattern; every other value
    /// (including `FALSE` and non-booleans) yields `false`.
    #[inline]
    pub fn as_bool(self) -> bool {
        self.0 == Self::TRUE.0
    }

    /// Payload of a plain immediate (false/true/unit), or `None` otherwise.
    /// Used by the wrapper encoder to inline such inner values.
    #[inline]
    pub fn plain_immediate_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_IMMEDIATE && self.payload() <= IMM_UNIT)
            .then_some(self.payload())
    }
373
    /// Maps a wrapper tag to its `WRAP_*` kind.
    ///
    /// # Panics
    /// Panics if `self` is not a Some/Ok/Err wrapper.
    #[inline]
    pub fn wrapper_kind(self) -> u64 {
        match self.tag() {
            TAG_SOME => WRAP_SOME,
            TAG_OK => WRAP_OK,
            TAG_ERR => WRAP_ERR,
            _ => panic!("wrapper_kind() called on non-wrapper"),
        }
    }

    /// Inline-kind bits (immediate/int/none) of a wrapper whose inner value
    /// is stored inline; `None` when arena-backed or not a wrapper.
    #[inline]
    fn wrapper_inline_kind(self) -> Option<u64> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT == 0 => {
                Some((self.payload() & WRAPPER_INLINE_KIND_MASK) >> WRAPPER_INLINE_KIND_SHIFT)
            }
            _ => None,
        }
    }
396
397    #[inline]
398    fn decode_wrapper_inline_int_payload(payload: u64) -> i64 {
399        let raw = payload & WRAPPER_INT_INLINE_MASK;
400        if raw & (1u64 << 42) != 0 {
401            (raw | !WRAPPER_INT_INLINE_MASK) as i64
402        } else {
403            raw as i64
404        }
405    }
406
    /// Masks `i` to the 43-bit inline wrapper payload (range debug-asserted).
    #[inline]
    fn encode_wrapper_inline_int(i: i64) -> u64 {
        debug_assert!((WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&i));
        (i as u64) & WRAPPER_INT_INLINE_MASK
    }

    /// Reconstructs the inner value of an inline wrapper, or `None` when the
    /// wrapper is arena-backed (or `self` is not a wrapper at all).
    #[inline]
    pub fn wrapper_inline_inner(self) -> Option<NanValue> {
        let kind = self.wrapper_inline_kind()?;
        let payload = self.payload() & WRAPPER_INLINE_PAYLOAD_MASK;
        match kind {
            WRAPPER_INLINE_IMMEDIATE => Some(Self::encode(TAG_IMMEDIATE, payload)),
            WRAPPER_INLINE_INT => Some(Self::new_int_inline(
                Self::decode_wrapper_inline_int_payload(payload),
            )),
            WRAPPER_INLINE_NONE => Some(Self::NONE),
            _ => None,
        }
    }

    /// Builds a wrapper whose inner value lives in the payload itself
    /// (no arena allocation).
    #[inline]
    fn new_inline_wrapper(tag: u64, inline_kind: u64, payload: u64) -> Self {
        debug_assert!(matches!(tag, TAG_SOME | TAG_OK | TAG_ERR));
        debug_assert!(inline_kind <= WRAPPER_INLINE_NONE);
        debug_assert!(payload <= WRAPPER_INLINE_PAYLOAD_MASK);
        Self::encode(tag, (inline_kind << WRAPPER_INLINE_KIND_SHIFT) | payload)
    }

    /// Splits any wrapper into `(WRAP_* kind, inner value)`, resolving
    /// arena-backed inners via `arena`; `None` for non-wrappers.
    #[inline]
    pub fn wrapper_parts<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<(u64, NanValue)> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT != 0 => {
                Some((self.wrapper_kind(), arena.get_boxed(self.arena_index())))
            }
            TAG_SOME | TAG_OK | TAG_ERR => self
                .wrapper_inline_inner()
                .map(|inner| (self.wrapper_kind(), inner)),
            _ => None,
        }
    }
450
451    // -- Wrappers (Some/Ok/Err) -------------------------------------------
452
    /// `Some` wrapper whose inner value lives in the arena at `inner_index`.
    #[inline]
    pub fn new_some(inner_index: u32) -> Self {
        Self::encode(TAG_SOME, ARENA_REF_BIT | (inner_index as u64))
    }

    /// `Ok` wrapper whose inner value lives in the arena at `inner_index`.
    #[inline]
    pub fn new_ok(inner_index: u32) -> Self {
        Self::encode(TAG_OK, ARENA_REF_BIT | (inner_index as u64))
    }

    /// `Err` wrapper whose inner value lives in the arena at `inner_index`.
    #[inline]
    pub fn new_err(inner_index: u32) -> Self {
        Self::encode(TAG_ERR, ARENA_REF_BIT | (inner_index as u64))
    }

    /// Arena index of an arena-backed wrapper (debug-asserted).
    #[inline]
    pub fn wrapper_index(self) -> u32 {
        debug_assert!(
            self.is_nan_boxed()
                && matches!(self.tag(), TAG_SOME | TAG_OK | TAG_ERR)
                && self.payload() & ARENA_REF_BIT != 0
        );
        self.arena_index()
    }

    /// Inner value of a wrapper.
    ///
    /// # Panics
    /// Panics (via `expect`) when `self` is not a Some/Ok/Err wrapper.
    #[inline]
    pub fn wrapper_inner<T: ArenaTypes>(self, arena: &Arena<T>) -> NanValue {
        self.wrapper_parts(arena)
            .map(|(_, inner)| inner)
            .expect("wrapper_inner() called on non-wrapper")
    }
484
485    #[inline]
486    fn wrap_value<T: ArenaTypes>(kind: u64, inner: NanValue, arena: &mut Arena<T>) -> Self {
487        if let Some(payload) = inner.plain_immediate_payload() {
488            let tag = match kind {
489                WRAP_SOME => TAG_SOME,
490                WRAP_OK => TAG_OK,
491                WRAP_ERR => TAG_ERR,
492                _ => unreachable!("invalid wrapper kind"),
493            };
494            Self::new_inline_wrapper(tag, WRAPPER_INLINE_IMMEDIATE, payload)
495        } else if inner.is_none() {
496            let tag = match kind {
497                WRAP_SOME => TAG_SOME,
498                WRAP_OK => TAG_OK,
499                WRAP_ERR => TAG_ERR,
500                _ => unreachable!("invalid wrapper kind"),
501            };
502            Self::new_inline_wrapper(tag, WRAPPER_INLINE_NONE, 0)
503        } else if let Some(value) = inner.inline_int_value() {
504            if (WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&value) {
505                let tag = match kind {
506                    WRAP_SOME => TAG_SOME,
507                    WRAP_OK => TAG_OK,
508                    WRAP_ERR => TAG_ERR,
509                    _ => unreachable!("invalid wrapper kind"),
510                };
511                return Self::new_inline_wrapper(
512                    tag,
513                    WRAPPER_INLINE_INT,
514                    Self::encode_wrapper_inline_int(value),
515                );
516            }
517            let idx = arena.push_boxed(inner);
518            match kind {
519                WRAP_SOME => Self::new_some(idx),
520                WRAP_OK => Self::new_ok(idx),
521                WRAP_ERR => Self::new_err(idx),
522                _ => unreachable!("invalid wrapper kind"),
523            }
524        } else {
525            let idx = arena.push_boxed(inner);
526            match kind {
527                WRAP_SOME => Self::new_some(idx),
528                WRAP_OK => Self::new_ok(idx),
529                WRAP_ERR => Self::new_err(idx),
530                _ => unreachable!("invalid wrapper kind"),
531            }
532        }
533    }
534
    /// `Some(inner)`, inlining small inner values (see [`Self::wrap_value`]).
    #[inline]
    pub fn new_some_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
        Self::wrap_value(WRAP_SOME, inner, arena)
    }

    /// `Ok(inner)`, inlining small inner values.
    #[inline]
    pub fn new_ok_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
        Self::wrap_value(WRAP_OK, inner, arena)
    }

    /// `Err(inner)`, inlining small inner values.
    #[inline]
    pub fn new_err_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
        Self::wrap_value(WRAP_ERR, inner, arena)
    }
549
550    // -- Arena-backed constructors -----------------------------------------
551
    /// String stored in the arena at `arena_index`.
    #[inline]
    pub fn new_string(arena_index: u32) -> Self {
        Self::encode(TAG_STRING, STRING_ARENA_BIT | (arena_index as u64))
    }
556
557    #[inline]
558    fn new_small_string_bytes(bytes: &[u8]) -> Self {
559        debug_assert!(bytes.len() <= STRING_INLINE_MAX_BYTES);
560        let mut payload = (bytes.len() as u64) << STRING_INLINE_LEN_SHIFT;
561        for (idx, byte) in bytes.iter().enumerate() {
562            payload |= (*byte as u64) << (idx * 8);
563        }
564        Self::encode(TAG_STRING, payload)
565    }
566
    /// Unpacks an inline small string, or `None` for arena strings and
    /// non-strings. The returned `NanString` owns its bytes (`'static`).
    #[inline]
    pub fn small_string(self) -> Option<NanString<'static>> {
        if !self.is_nan_boxed()
            || self.tag() != TAG_STRING
            || self.payload() & STRING_ARENA_BIT != 0
        {
            return None;
        }
        let payload = self.payload();
        let len = ((payload & STRING_INLINE_LEN_MASK) >> STRING_INLINE_LEN_SHIFT) as u8;
        // The 3-bit length field can encode up to 7; reject anything beyond
        // the 5 bytes that actually fit.
        if len as usize > STRING_INLINE_MAX_BYTES {
            return None;
        }
        let mut bytes = [0u8; STRING_INLINE_MAX_BYTES];
        for (idx, slot) in bytes.iter_mut().take(len as usize).enumerate() {
            *slot = ((payload >> (idx * 8)) & 0xFF) as u8;
        }
        Some(NanString::Inline { len, bytes })
    }

    /// Encodes `s` inline when it fits 5 bytes, otherwise stores it in the
    /// arena.
    #[inline]
    pub fn new_string_value<T: ArenaTypes>(s: &str, arena: &mut Arena<T>) -> Self {
        if s.len() <= STRING_INLINE_MAX_BYTES {
            Self::new_small_string_bytes(s.as_bytes())
        } else {
            Self::new_string(arena.push_string(s))
        }
    }
595
    /// List stored in the arena at `arena_index`.
    #[inline]
    pub fn new_list(arena_index: u32) -> Self {
        Self::encode(TAG_LIST, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Tuple stored in the arena at `arena_index`.
    #[inline]
    pub fn new_tuple(arena_index: u32) -> Self {
        Self::encode(TAG_TUPLE, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Map stored in the arena at `arena_index`.
    #[inline]
    pub fn new_map(arena_index: u32) -> Self {
        Self::encode(TAG_MAP, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Vector stored in the arena at `arena_index`.
    #[inline]
    pub fn new_vector(arena_index: u32) -> Self {
        Self::encode(TAG_VECTOR, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Record stored in the arena at `arena_index`.
    #[inline]
    pub fn new_record(arena_index: u32) -> Self {
        Self::encode(TAG_RECORD, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Variant stored in the arena at `arena_index`.
    #[inline]
    pub fn new_variant(arena_index: u32) -> Self {
        Self::encode(TAG_VARIANT, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Symbol: 2-bit kind in bits 1-0, index in the bits above.
    #[inline]
    fn new_symbol(symbol_kind: u64, symbol_index: u32) -> Self {
        Self::encode(TAG_SYMBOL, symbol_kind | ((symbol_index as u64) << 2))
    }

    /// Symbol kind (SYMBOL_FN/BUILTIN/NAMESPACE/NULLARY_VARIANT).
    #[inline]
    pub fn symbol_kind(self) -> u64 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        self.payload() & SYMBOL_KIND_MASK
    }

    /// Symbol index (the payload above the 2 kind bits).
    #[inline]
    pub fn symbol_index(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        (self.payload() >> 2) as u32
    }

    /// Payload-free variant constructor, encoded as a symbol.
    #[inline]
    pub fn new_nullary_variant(symbol_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NULLARY_VARIANT, symbol_index)
    }
647
    /// Tries to pack `ctor_id` plus `inner` into a single inline-variant
    /// word, avoiding an arena allocation.
    ///
    /// Succeeds only when `ctor_id` fits in 16 bits and `inner` is a 29-bit
    /// inline int, a plain immediate (false/true/unit), or `NONE`. Returns
    /// `None` otherwise; the caller must fall back to an arena-backed
    /// variant.
    #[inline]
    pub fn try_new_inline_variant(ctor_id: u32, inner: NanValue) -> Option<Self> {
        if ctor_id > IV_CTOR_MASK as u32 {
            return None;
        }
        let ctor_bits = (ctor_id as u64) << IV_CTOR_SHIFT;

        if inner.is_nan_boxed() {
            match inner.tag() {
                // Inline int: store directly when it fits 29 bits.
                TAG_INT if inner.payload() & INT_BIG_BIT == 0 => {
                    let i = Self::decode_inline_int_payload(inner.payload());
                    if (IV_INT_MIN..=IV_INT_MAX).contains(&i) {
                        let int_bits = (i as u64) & IV_INT_MASK;
                        return Some(Self::encode(TAG_INLINE_VARIANT, ctor_bits | int_bits));
                    }
                }
                // Immediates map onto the 2-bit selector at bits 28-27.
                TAG_IMMEDIATE => {
                    let imm = match inner.payload() {
                        IMM_FALSE => IV_IMM_FALSE,
                        IMM_TRUE => IV_IMM_TRUE,
                        IMM_UNIT => IV_IMM_UNIT,
                        _ => return None,
                    };
                    return Some(Self::encode(
                        TAG_INLINE_VARIANT,
                        ctor_bits | IV_KIND_BIT | (imm << IV_IMM_SHIFT),
                    ));
                }
                TAG_NONE => {
                    return Some(Self::encode(
                        TAG_INLINE_VARIANT,
                        ctor_bits | IV_KIND_BIT | (IV_IMM_NONE << IV_IMM_SHIFT),
                    ));
                }
                _ => {}
            }
        }
        None
    }

    /// Constructor id packed into an inline variant (bits 45-30).
    #[inline]
    pub fn inline_variant_ctor_id(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT);
        ((self.payload() >> IV_CTOR_SHIFT) & IV_CTOR_MASK) as u32
    }
693
694    #[inline]
695    pub fn inline_variant_inner(self) -> NanValue {
696        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT);
697        let payload = self.payload();
698        if payload & IV_KIND_BIT == 0 {
699            let raw = payload & IV_INT_MASK;
700            let i = if raw & IV_INT_SIGN_BIT != 0 {
701                (raw | !IV_INT_MASK) as i64
702            } else {
703                raw as i64
704            };
705            Self::new_int_inline(i)
706        } else {
707            let imm = (payload >> IV_IMM_SHIFT) & 0b11;
708            match imm {
709                IV_IMM_FALSE => Self::FALSE,
710                IV_IMM_TRUE => Self::TRUE,
711                IV_IMM_UNIT => Self::UNIT,
712                IV_IMM_NONE => Self::NONE,
713                _ => unreachable!(),
714            }
715        }
716    }
717
    /// Function symbol pointing at slot `arena_index`.
    #[inline]
    pub fn new_fn(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_FN, arena_index)
    }

    /// Builtin symbol pointing at slot `arena_index`.
    #[inline]
    pub fn new_builtin(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_BUILTIN, arena_index)
    }

    /// Namespace symbol pointing at slot `arena_index`.
    #[inline]
    pub fn new_namespace(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NAMESPACE, arena_index)
    }

    /// Payload with the arena-ref bit cleared, truncated to 32 bits.
    /// Only meaningful for values whose payload is an arena index.
    #[inline]
    pub fn arena_index(self) -> u32 {
        (self.payload() & !ARENA_REF_BIT) as u32
    }
737
738    #[inline]
739    pub fn heap_index(self) -> Option<u32> {
740        if !self.is_nan_boxed() {
741            return None;
742        }
743        match self.tag() {
744            TAG_INT => {
745                let p = self.payload();
746                if p & INT_BIG_BIT != 0 {
747                    Some((p & !INT_BIG_BIT) as u32)
748                } else {
749                    None
750                }
751            }
752            TAG_STRING | TAG_SOME | TAG_OK | TAG_ERR | TAG_LIST | TAG_TUPLE | TAG_MAP
753            | TAG_RECORD | TAG_VARIANT | TAG_VECTOR => {
754                (self.payload() & ARENA_REF_BIT != 0).then_some(self.arena_index())
755            }
756            _ => None,
757        }
758    }
759
    /// Returns `self` re-pointed at heap slot `index`, keeping the tag.
    ///
    /// Values that carry no heap reference (floats, immediates, symbols,
    /// inline variants) are returned unchanged. NOTE(review): presumably
    /// used when relocating arena entries (e.g. compaction) — confirm with
    /// callers.
    #[inline]
    pub fn with_heap_index(self, index: u32) -> Self {
        if !self.is_nan_boxed() {
            return self;
        }
        match self.tag() {
            TAG_INT => {
                // Only arena-backed ints can be re-pointed.
                debug_assert!(self.payload() & INT_BIG_BIT != 0);
                Self::new_int_arena(index)
            }
            TAG_SOME => Self::new_some(index),
            TAG_OK => Self::new_ok(index),
            TAG_ERR => Self::new_err(index),
            TAG_STRING => Self::new_string(index),
            TAG_LIST => Self::new_list(index),
            TAG_TUPLE => Self::new_tuple(index),
            TAG_MAP => Self::new_map(index),
            TAG_VECTOR => Self::new_vector(index),
            TAG_RECORD => Self::new_record(index),
            TAG_VARIANT => Self::new_variant(index),
            _ => self,
        }
    }
783
784    // -- Type checks -------------------------------------------------------
785
    /// `true` for raw floats (anything not nan-boxed).
    #[inline]
    pub fn is_float(self) -> bool {
        !self.is_nan_boxed()
    }

    /// `true` for Ints, inline or arena-backed.
    #[inline]
    pub fn is_int(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_INT
    }

    /// `true` for the TRUE/FALSE singletons only.
    #[inline]
    pub fn is_bool(self) -> bool {
        self.is_nan_boxed()
            && self.tag() == TAG_IMMEDIATE
            && (self.payload() == IMM_TRUE || self.payload() == IMM_FALSE)
    }

    /// `true` for the UNIT singleton.
    #[inline]
    pub fn is_unit(self) -> bool {
        self.0 == Self::UNIT.0
    }

    /// `true` for the NONE singleton.
    #[inline]
    pub fn is_none(self) -> bool {
        self.0 == Self::NONE.0
    }

    /// `true` for Some wrappers, inline or arena-backed.
    #[inline]
    pub fn is_some(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SOME
    }

    /// `true` for Ok wrappers.
    #[inline]
    pub fn is_ok(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_OK
    }

    /// `true` for Err wrappers.
    #[inline]
    pub fn is_err(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_ERR
    }

    /// `true` for Strings, inline or arena-backed.
    #[inline]
    pub fn is_string(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_STRING
    }
832
833    pub fn string_eq<T: ArenaTypes>(self, other: NanValue, arena: &Arena<T>) -> bool {
834        if self.bits() == other.bits() {
835            return true;
836        }
837        if !self.is_string() || !other.is_string() {
838            return false;
839        }
840        arena.get_string_value(self).as_str() == arena.get_string_value(other).as_str()
841    }
842
    /// `true` for Lists (including the empty-list immediate).
    #[inline]
    pub fn is_list(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_LIST
    }

    /// `true` for Records.
    #[inline]
    pub fn is_record(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_RECORD
    }

    /// `true` for function symbols.
    #[inline]
    pub fn is_fn(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_FN
    }

    /// `true` for any variant representation: arena-backed, inline, or
    /// nullary (symbol-encoded).
    #[inline]
    pub fn is_variant(self) -> bool {
        self.is_nan_boxed()
            && (self.tag() == TAG_VARIANT
                || self.tag() == TAG_INLINE_VARIANT
                || (self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NULLARY_VARIANT))
    }

    /// `true` only for the inline-variant encoding.
    #[inline]
    pub fn is_inline_variant(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT
    }

    /// `true` for Maps (including the empty-map immediate).
    #[inline]
    pub fn is_map(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_MAP
    }

    /// `true` for Vectors (including the empty-vector immediate).
    #[inline]
    pub fn is_vector(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_VECTOR
    }

    /// `true` for the empty-vector immediate (no arena entry).
    #[inline]
    pub fn is_empty_vector_immediate(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_VECTOR && self.payload() & ARENA_REF_BIT == 0
    }

    /// `true` for Tuples.
    #[inline]
    pub fn is_tuple(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_TUPLE
    }

    /// `true` for builtin symbols.
    #[inline]
    pub fn is_builtin(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_BUILTIN
    }

    /// `true` for namespace symbols.
    #[inline]
    pub fn is_namespace(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NAMESPACE
    }

    /// `true` for the empty-list immediate (no arena entry).
    #[inline]
    pub fn is_empty_list_immediate(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_LIST && self.payload() & ARENA_REF_BIT == 0
    }

    /// `true` for the empty-map immediate (no arena entry).
    #[inline]
    pub fn is_empty_map_immediate(self) -> bool {
        self.is_nan_boxed() && self.tag() == TAG_MAP && self.payload() & ARENA_REF_BIT == 0
    }
910
911    pub fn type_name(self) -> &'static str {
912        if self.is_float() {
913            return "Float";
914        }
915        match self.tag() {
916            TAG_INT => "Int",
917            TAG_IMMEDIATE => match self.payload() {
918                IMM_FALSE | IMM_TRUE => "Bool",
919                IMM_UNIT => "Unit",
920                _ => "Unknown",
921            },
922            TAG_SOME => "Option.Some",
923            TAG_NONE => "Option.None",
924            TAG_OK => "Result.Ok",
925            TAG_ERR => "Result.Err",
926            TAG_STRING => "String",
927            TAG_LIST => "List",
928            TAG_TUPLE => "Tuple",
929            TAG_MAP => "Map",
930            TAG_VECTOR => "Vector",
931            TAG_RECORD => "Record",
932            TAG_VARIANT | TAG_INLINE_VARIANT => "Variant",
933            TAG_SYMBOL => match self.symbol_kind() {
934                SYMBOL_FN => "Fn",
935                SYMBOL_BUILTIN => "Builtin",
936                SYMBOL_NAMESPACE => "Namespace",
937                SYMBOL_NULLARY_VARIANT => "Variant",
938                _ => "Unknown",
939            },
940            _ => "Unknown",
941        }
942    }
943
944    #[inline]
945    pub fn variant_ctor_id<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<u32> {
946        if !self.is_nan_boxed() {
947            return None;
948        }
949        match self.tag() {
950            TAG_INLINE_VARIANT => Some(self.inline_variant_ctor_id()),
951            TAG_VARIANT => {
952                let (type_id, variant_id, _) = arena.get_variant(self.arena_index());
953                arena.find_ctor_id(type_id, variant_id)
954            }
955            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
956                Some(arena.get_nullary_variant_ctor(self.symbol_index()))
957            }
958            _ => None,
959        }
960    }
961
962    #[inline]
963    pub fn variant_parts<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<(u32, u16, &[NanValue])> {
964        if !self.is_nan_boxed() {
965            return None;
966        }
967        match self.tag() {
968            TAG_VARIANT => {
969                let (type_id, variant_id, fields) = arena.get_variant(self.arena_index());
970                Some((type_id, variant_id, fields))
971            }
972            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
973                let (type_id, variant_id) =
974                    arena.get_ctor_parts(arena.get_nullary_variant_ctor(self.symbol_index()));
975                Some((type_id, variant_id, &[]))
976            }
977            _ => None,
978        }
979    }
980
981    #[inline]
982    pub fn variant_single_field<T: ArenaTypes>(self, arena: &Arena<T>) -> NanValue {
983        if self.tag() == TAG_INLINE_VARIANT {
984            self.inline_variant_inner()
985        } else {
986            let (_, _, fields) = arena.get_variant(self.arena_index());
987            debug_assert_eq!(fields.len(), 1);
988            fields[0]
989        }
990    }
991
992    #[inline]
993    pub fn inline_variant_info<T: ArenaTypes>(
994        self,
995        arena: &Arena<T>,
996    ) -> Option<(u32, u16, NanValue)> {
997        if !self.is_nan_boxed() || self.tag() != TAG_INLINE_VARIANT {
998            return None;
999        }
1000        let ctor_id = self.inline_variant_ctor_id();
1001        let (type_id, variant_id) = arena.get_ctor_parts(ctor_id);
1002        Some((type_id, variant_id, self.inline_variant_inner()))
1003    }
1004
1005    #[inline]
1006    pub fn bits(self) -> u64 {
1007        self.0
1008    }
1009
1010    #[inline]
1011    pub fn from_bits(bits: u64) -> Self {
1012        NanValue(bits)
1013    }
1014
1015    pub fn map_key_hash<T: ArenaTypes>(self, arena: &Arena<T>) -> u64 {
1016        if self.is_string() {
1017            use core::hash::{Hash, Hasher};
1018            let mut hasher = DefaultHasher::new();
1019            3u8.hash(&mut hasher);
1020            arena.get_string_value(self).hash(&mut hasher);
1021            hasher.finish()
1022        } else {
1023            self.bits()
1024        }
1025    }
1026}
1027
1028// -- Debug -----------------------------------------------------------------
1029
impl core::fmt::Debug for NanValue {
    // Decodes the NaN-box by hand so Debug works without an `Arena`:
    // arena-backed values print their slot index rather than their contents.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Any bit pattern outside our quiet-NaN encoding is a plain f64.
        if self.is_float() {
            return write!(f, "Float({})", self.as_float());
        }
        match self.tag() {
            TAG_INT => {
                // INT_BIG_BIT set => payload is an arena slot, not an inline int.
                if self.payload() & INT_BIG_BIT != 0 {
                    write!(f, "Int(arena:{})", (self.payload() & !INT_BIG_BIT) as u32)
                } else {
                    write!(
                        f,
                        "Int({})",
                        Self::decode_inline_int_payload(self.payload())
                    )
                }
            }
            TAG_IMMEDIATE => match self.payload() {
                IMM_FALSE => write!(f, "False"),
                IMM_TRUE => write!(f, "True"),
                IMM_UNIT => write!(f, "Unit"),
                _ => write!(f, "Immediate({})", self.payload()),
            },
            TAG_NONE => write!(f, "None"),
            // The three wrapper tags share one arm; `kind` recovers the name.
            TAG_SOME | TAG_OK | TAG_ERR => {
                let kind = match self.tag() {
                    TAG_SOME => "Some",
                    TAG_OK => "Ok",
                    TAG_ERR => "Err",
                    _ => "?",
                };
                if self.payload() & ARENA_REF_BIT != 0 {
                    write!(f, "{}(arena:{})", kind, self.arena_index())
                } else if let Some(inner) = self.wrapper_inline_inner() {
                    write!(f, "{}({:?})", kind, inner)
                } else {
                    write!(f, "{}(?)", kind)
                }
            }
            TAG_SYMBOL => match self.symbol_kind() {
                SYMBOL_FN => write!(f, "Fn(symbol:{})", self.symbol_index()),
                SYMBOL_BUILTIN => write!(f, "Builtin(symbol:{})", self.symbol_index()),
                SYMBOL_NAMESPACE => write!(f, "Namespace(symbol:{})", self.symbol_index()),
                SYMBOL_NULLARY_VARIANT => {
                    write!(f, "NullaryVariant(symbol:{})", self.symbol_index())
                }
                _ => write!(f, "Symbol({})", self.payload()),
            },
            TAG_STRING => {
                // Small strings live inline and can be shown verbatim.
                if let Some(s) = self.small_string() {
                    write!(f, "String({:?})", s.as_str())
                } else {
                    write!(f, "String(arena:{})", self.arena_index())
                }
            }
            TAG_INLINE_VARIANT => {
                let ctor = self.inline_variant_ctor_id();
                let inner = self.inline_variant_inner();
                write!(f, "InlineVariant(ctor:{}, {:?})", ctor, inner)
            }
            // Empty-collection singletons have no arena slot to print.
            TAG_LIST if self.is_empty_list_immediate() => write!(f, "EmptyList"),
            TAG_MAP if self.is_empty_map_immediate() => write!(f, "EmptyMap"),
            TAG_VECTOR if self.is_empty_vector_immediate() => write!(f, "EmptyVector"),
            _ => write!(f, "{}(arena:{})", self.type_name(), self.arena_index()),
        }
    }
}
1097
1098// ---------------------------------------------------------------------------
1099// A simple DefaultHasher for no_std — mirrors std::collections::hash_map::DefaultHasher
1100// ---------------------------------------------------------------------------
1101
/// Hasher used for map-key hashing.
///
/// With the `std` feature this delegates to
/// `std::collections::hash_map::DefaultHasher` (SipHash); without `std` it
/// falls back to a small 64-bit FNV-1a implementation (see the `Hasher`
/// impl below).
struct DefaultHasher {
    #[cfg(feature = "std")]
    inner: std::collections::hash_map::DefaultHasher,
    #[cfg(not(feature = "std"))]
    state: u64, // FNV-1a running state
}
1110
impl DefaultHasher {
    /// Create a fresh hasher: std-backed when available, FNV-1a otherwise.
    fn new() -> Self {
        #[cfg(feature = "std")]
        {
            Self {
                inner: std::collections::hash_map::DefaultHasher::new(),
            }
        }
        #[cfg(not(feature = "std"))]
        {
            Self {
                // FNV-1a 64-bit offset basis.
                state: 0xcbf29ce484222325,
            }
        }
    }
}
1127
impl Hasher for DefaultHasher {
    // std build: pure delegation to the SipHash-based hasher.
    #[cfg(feature = "std")]
    fn finish(&self) -> u64 {
        self.inner.finish()
    }
    #[cfg(feature = "std")]
    fn write(&mut self, bytes: &[u8]) {
        self.inner.write(bytes)
    }

    // no_std build: 64-bit FNV-1a.
    #[cfg(not(feature = "std"))]
    fn finish(&self) -> u64 {
        self.state
    }
    #[cfg(not(feature = "std"))]
    fn write(&mut self, bytes: &[u8]) {
        // FNV-1a: xor each byte in, then multiply by the 64-bit FNV prime.
        for &b in bytes {
            self.state ^= b as u64;
            self.state = self.state.wrapping_mul(0x100000001b3);
        }
    }
}
1150
1151// ---------------------------------------------------------------------------
1152// Arena
1153// ---------------------------------------------------------------------------
1154
/// Heap of boxed values referenced by `NanValue` arena indices.
///
/// Entries are segregated into four spaces — young, yard, handoff, stable —
/// matching the [`HeapSpace`] discriminants. Alongside the entry storage the
/// arena carries the program's type/constructor metadata tables and the
/// symbol table backing `TAG_SYMBOL` values.
#[derive(Debug, Clone)]
pub struct Arena<T: ArenaTypes> {
    // One entry vector per heap space.
    young_entries: Vec<ArenaEntry<T>>,
    yard_entries: Vec<ArenaEntry<T>>,
    handoff_entries: Vec<ArenaEntry<T>>,
    stable_entries: Vec<ArenaEntry<T>>,
    // Per-space u32 scratch buffers.
    // NOTE(review): presumably remap/forwarding tables used during
    // collection — confirm in the `memory` module.
    scratch_young: Vec<u32>,
    scratch_yard: Vec<u32>,
    scratch_handoff: Vec<u32>,
    scratch_stable: Vec<u32>,
    // Peak usage snapshot (see `ArenaUsage`).
    peak_usage: ArenaUsage,
    // Space that new allocations currently target.
    alloc_space: AllocSpace,
    /// Type names (presumably indexed by type id — matches the other tables).
    pub type_names: Vec<String>,
    /// Per-type ordered record field names.
    pub type_field_names: Vec<Vec<String>>,
    /// Per-type variant names, indexed by variant id.
    pub type_variant_names: Vec<Vec<String>>,
    /// Per-type ctor ids, parallel to `type_variant_names`.
    pub type_variant_ctor_ids: Vec<Vec<u32>>,
    /// Ctor id -> (type_id, variant_id); see `get_ctor_parts` usage above.
    pub ctor_to_type_variant: Vec<(u32, u16)>,
    /// Symbol index -> symbol payload (fns, builtins, namespaces,
    /// nullary variants).
    pub symbol_entries: Vec<ArenaSymbol<T>>,
}
1174
/// A boxed value stored in an [`Arena`] slot — the out-of-line half of each
/// NaN-box tag that sets its arena-reference bit (see the tag map at the
/// top of the file).
#[derive(Debug, Clone)]
pub enum ArenaEntry<T: ArenaTypes> {
    /// Integer too wide for the 45-bit inline encoding.
    Int(i64),
    /// String too long for the inline small-string encoding.
    String(Rc<str>),
    /// List node; see [`ArenaList`] for the representations.
    List(ArenaList),
    Tuple(Vec<NanValue>),
    /// Map storage; concrete type chosen by the `ArenaTypes` binding.
    Map(T::Map),
    Vector(Vec<NanValue>),
    Record {
        type_id: u32,
        fields: Vec<NanValue>,
    },
    Variant {
        type_id: u32,
        variant_id: u16,
        fields: Vec<NanValue>,
    },
    Fn(Rc<T::Fn>),
    /// Builtin, identified by name.
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// A single boxed inner value.
    /// NOTE(review): presumably the arena-backed form of Some/Ok/Err
    /// wrappers — confirm against the wrapper constructors.
    Boxed(NanValue),
}
1200
/// Symbol-table entry referenced by `TAG_SYMBOL` values; the symbol kind
/// bits select among these four shapes (fn / builtin / namespace /
/// nullary variant).
#[derive(Debug, Clone)]
pub enum ArenaSymbol<T: ArenaTypes> {
    Fn(Rc<T::Fn>),
    /// Builtin, identified by name.
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Variant constructor with no fields; `ctor_id` is resolved with
    /// `Arena::get_ctor_parts` (see `variant_parts` above).
    NullaryVariant {
        ctor_id: u32,
    },
}
1213
/// Structural list representation: lists are built from these nodes rather
/// than eagerly copied (traversal/flattening lives in the `lists` module).
#[derive(Debug, Clone)]
pub enum ArenaList {
    /// Contiguous items shared via `Rc`; the view begins at `start`, so
    /// tails can share one backing vector without copying.
    Flat {
        items: Rc<Vec<NanValue>>,
        start: usize,
    },
    /// Cons cell: `head` prepended to the list value `tail`;
    /// `len` caches the total length.
    Prepend {
        head: NanValue,
        tail: NanValue,
        len: usize,
    },
    /// Concatenation of two list values; `len` caches the total length.
    Concat {
        left: NanValue,
        right: NanValue,
        len: usize,
    },
    /// Segmented list: a `current` list value plus further segments in
    /// `rest` from `start` onward; `len` caches the total length.
    /// NOTE(review): exact traversal order is defined in the `lists`
    /// module — confirm there.
    Segments {
        current: NanValue,
        rest: Rc<Vec<NanValue>>,
        start: usize,
        len: usize,
    },
}
1237
/// Chunk-size bound for list append operations.
/// NOTE(review): consumed in the `lists` module — exact role not visible here.
const LIST_APPEND_CHUNK_LIMIT: usize = 128;
1239
/// Which of the four arena spaces an entry lives in. The explicit
/// discriminants (0-3) fit the 2-bit space tag carried in packed arena
/// indices (see `HEAP_SPACE_SHIFT` / `HEAP_SPACE_MASK_U32` below).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum HeapSpace {
    Young = 0,
    Yard = 1,
    Handoff = 2,
    Stable = 3,
}
1247
/// Space a new allocation may target — every [`HeapSpace`] except `Stable`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocSpace {
    Young,
    Yard,
    Handoff,
}
1254
/// Per-space arena entry counts, one field per [`HeapSpace`].
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct ArenaUsage {
    pub young: usize,
    pub yard: usize,
    pub handoff: usize,
    pub stable: usize,
}

impl ArenaUsage {
    /// Total entry count across all four heap spaces.
    pub fn total(self) -> usize {
        let ArenaUsage {
            young,
            yard,
            handoff,
            stable,
        } = self;
        young + yard + handoff + stable
    }
}
1268
// Packed arena-index encoding: a 2-bit [`HeapSpace`] tag in bits 31-30 of a
// u32, with the 30-bit slot index below it in bits 29-0.
pub(crate) const HEAP_SPACE_SHIFT: u32 = 30;
pub(crate) const HEAP_SPACE_MASK_U32: u32 = 0b11 << HEAP_SPACE_SHIFT;
pub(crate) const HEAP_INDEX_MASK_U32: u32 = (1 << HEAP_SPACE_SHIFT) - 1;
1272
1273mod arena;
1274mod compare;
1275mod lists;
1276mod memory;
1277
1278// ---------------------------------------------------------------------------
1279// Feature-gated MapLike impl for aver_rt::AverMap
1280// ---------------------------------------------------------------------------
1281
#[cfg(feature = "runtime")]
/// `PersistentMap` type alias used by the VM arena. Keys are `u64` hashes
/// (presumably produced by `NanValue::map_key_hash` — confirm at call
/// sites); each entry stores the original (key, value) pair.
pub type PersistentMap = aver_rt::AverMap<u64, (NanValue, NanValue)>;
1285
#[cfg(feature = "runtime")]
// Pure delegation impl: each trait method forwards to the identically named
// method on `aver_rt::AverMap`, fully qualified to pick the inherent method
// unambiguously.
impl MapLike for aver_rt::AverMap<u64, (NanValue, NanValue)> {
    fn new() -> Self {
        aver_rt::AverMap::new()
    }

    fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)> {
        aver_rt::AverMap::get(self, key)
    }

    // Persistent insert: returns a new map, leaving `self` intact.
    fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self {
        aver_rt::AverMap::insert(self, key, value)
    }

    // Owning insert: consumes `self`, allowing in-place update.
    fn insert_owned(self, key: u64, value: (NanValue, NanValue)) -> Self {
        aver_rt::AverMap::insert_owned(self, key, value)
    }

    fn len(&self) -> usize {
        aver_rt::AverMap::len(self)
    }

    fn is_empty(&self) -> bool {
        aver_rt::AverMap::is_empty(self)
    }

    fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))> {
        aver_rt::AverMap::iter(self)
    }

    fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)> {
        aver_rt::AverMap::values(self)
    }
}
1320
1321// ---------------------------------------------------------------------------
1322// Stub PersistentMap when runtime is off (BTreeMap-based)
1323// ---------------------------------------------------------------------------
1324
#[cfg(not(feature = "runtime"))]
/// Stub `PersistentMap` for non-runtime builds (e.g. wasm-compile only).
/// Backed by a `BTreeMap`; the persistent `insert` clones the whole tree,
/// so this is functional rather than performance-oriented.
#[derive(Clone, Debug)]
pub struct PersistentMap(alloc::collections::BTreeMap<u64, (NanValue, NanValue)>);
1329
1330#[cfg(not(feature = "runtime"))]
1331impl MapLike for PersistentMap {
1332    fn new() -> Self {
1333        PersistentMap(alloc::collections::BTreeMap::new())
1334    }
1335
1336    fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)> {
1337        self.0.get(key)
1338    }
1339
1340    fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self {
1341        let mut m = self.0.clone();
1342        m.insert(key, value);
1343        PersistentMap(m)
1344    }
1345
1346    fn len(&self) -> usize {
1347        self.0.len()
1348    }
1349
1350    fn is_empty(&self) -> bool {
1351        self.0.is_empty()
1352    }
1353
1354    fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))> {
1355        self.0.iter()
1356    }
1357
1358    fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)> {
1359        self.0.values()
1360    }
1361}