1#![cfg_attr(not(feature = "std"), no_std)]
38
39extern crate alloc;
40
41use alloc::format;
42use alloc::string::String;
43use alloc::sync::Arc as Rc;
44use alloc::vec::Vec;
45use core::cmp::Ordering;
46use core::hash::{Hash, Hasher};
47use core::ops::Deref;
48
/// Type bundle a host embeds into the arena: the function-object and
/// persistent-map implementations the runtime stores inside `ArenaEntry`.
pub trait ArenaTypes: Clone + core::fmt::Debug + 'static {
    /// Callable value stored behind `TAG_SYMBOL`/`SYMBOL_FN`.
    type Fn: Clone + core::fmt::Debug + FnValueName;
    /// Persistent key/value map stored behind `TAG_MAP`.
    type Map: Clone + core::fmt::Debug + MapLike;
}
63
/// Exposes a human-readable name for a function value (used for display).
pub trait FnValueName {
    /// Returns the function's display name.
    fn name(&self) -> &str;
}
68
/// Persistent-map abstraction keyed by a pre-hashed `u64`.
///
/// Values are `(NanValue, NanValue)` pairs — presumably the original
/// (un-hashed) key plus the mapped value; confirm against callers of
/// `map_key_hash`.
pub trait MapLike: Sized {
    /// Creates an empty map.
    fn new() -> Self;
    /// Looks up an entry by hashed key.
    fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)>;
    /// Persistent insert: returns a new map with `key` bound to `value`,
    /// leaving `self` untouched.
    fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self;
    /// Insert that consumes the map; the default falls back to the
    /// persistent `insert`, implementors may override with something cheaper.
    fn insert_owned(self, key: u64, value: (NanValue, NanValue)) -> Self {
        self.insert(key, value)
    }
    /// Applies `f` to every stored value in place.
    fn rewrite_values_mut(&mut self, f: impl FnMut(&mut (NanValue, NanValue)));
    /// Number of entries.
    fn len(&self) -> usize;
    /// True when the map holds no entries.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Iterates `(hashed key, entry)` pairs.
    fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))>;
    /// Iterates stored entries.
    fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)>;
}
90
// ---------------------------------------------------------------------------
// NaN-box layout: a boxed value is a quiet NaN whose mantissa carries a
// 4-bit tag (bits 46..=49) and a 46-bit payload (bits 0..=45).
// ---------------------------------------------------------------------------
const QNAN: u64 = 0x7FFC_0000_0000_0000;
// Sign + exponent + top two mantissa bits; a bit pattern matching QNAN under
// this mask is one of our boxed values, everything else is a plain f64.
const QNAN_MASK: u64 = 0xFFFC_0000_0000_0000;
const TAG_SHIFT: u32 = 46;
const TAG_MASK: u64 = 0xF;
const PAYLOAD_MASK: u64 = (1u64 << 46) - 1;

// 4-bit value tags.
pub const TAG_IMMEDIATE: u64 = 0;
pub const TAG_SYMBOL: u64 = 1;
pub const TAG_INT: u64 = 2;
pub const TAG_STRING: u64 = 3;
pub const TAG_SOME: u64 = 4;
pub const TAG_NONE: u64 = 5;
pub const TAG_OK: u64 = 6;
pub const TAG_ERR: u64 = 7;
pub const TAG_LIST: u64 = 8;
pub const TAG_VECTOR: u64 = 9;
pub const TAG_MAP: u64 = 10;
pub const TAG_RECORD: u64 = 11;
pub const TAG_VARIANT: u64 = 12;
pub const TAG_TUPLE: u64 = 13;
pub const TAG_INLINE_VARIANT: u64 = 14;

// `TAG_SYMBOL` sub-kinds, stored in the low two payload bits.
pub const SYMBOL_FN: u64 = 0;
pub const SYMBOL_BUILTIN: u64 = 1;
pub const SYMBOL_NAMESPACE: u64 = 2;
pub const SYMBOL_NULLARY_VARIANT: u64 = 3;
const SYMBOL_KIND_MASK: u64 = 0b11;

// `TAG_IMMEDIATE` payload codes.
pub const IMM_FALSE: u64 = 0;
pub const IMM_TRUE: u64 = 1;
pub const IMM_UNIT: u64 = 2;

// Wrapper (`Some`/`Ok`/`Err`) kind codes, plus the inline-wrapper layout:
// when a wrapper payload has no ARENA_REF_BIT set, bits 43..=44 hold a
// WRAPPER_INLINE_* kind and bits 0..=42 hold the inlined inner value.
pub const WRAP_SOME: u64 = 0;
pub const WRAP_OK: u64 = 1;
pub const WRAP_ERR: u64 = 2;
const WRAPPER_INLINE_KIND_SHIFT: u32 = 43;
const WRAPPER_INLINE_KIND_MASK: u64 = 0b11 << WRAPPER_INLINE_KIND_SHIFT;
const WRAPPER_INLINE_PAYLOAD_MASK: u64 = (1u64 << WRAPPER_INLINE_KIND_SHIFT) - 1;
const WRAPPER_INLINE_IMMEDIATE: u64 = 0;
const WRAPPER_INLINE_INT: u64 = 1;
const WRAPPER_INLINE_NONE: u64 = 2;
const WRAPPER_INT_INLINE_MASK: u64 = WRAPPER_INLINE_PAYLOAD_MASK;
// 43-bit two's-complement range for ints inlined inside a wrapper.
const WRAPPER_INT_INLINE_MAX: i64 = (1i64 << 42) - 1;
const WRAPPER_INT_INLINE_MIN: i64 = -(1i64 << 42);

// Payload bit 45 marks "the payload is an arena slot index".
pub const ARENA_REF_BIT: u64 = 1u64 << 45;
const INT_BIG_BIT: u64 = ARENA_REF_BIT;
// 45-bit two's-complement range for inline ints (sign bit is bit 44).
const INT_INLINE_MASK: u64 = (1u64 << 45) - 1;
pub const INT_INLINE_MAX: i64 = (1i64 << 44) - 1;
pub const INT_INLINE_MIN: i64 = -(1i64 << 44);

// Inline-string layout: 3-bit byte length at bits 40..=42, up to 5 UTF-8
// bytes packed little-endian into bits 0..=39.
const STRING_ARENA_BIT: u64 = ARENA_REF_BIT;
const STRING_INLINE_LEN_SHIFT: u32 = 40;
const STRING_INLINE_LEN_MASK: u64 = 0b111 << STRING_INLINE_LEN_SHIFT;
const STRING_INLINE_MAX_BYTES: usize = 5;

// Inline-variant (`TAG_INLINE_VARIANT`) layout: 16-bit ctor id at bits
// 30..=45; IV_KIND_BIT (bit 29) selects between a 29-bit signed int in bits
// 0..=28 and an IV_IMM_* code in bits 27..=28.
const IV_CTOR_SHIFT: u32 = 30;
const IV_CTOR_MASK: u64 = 0xFFFF;
const IV_KIND_BIT: u64 = 1 << 29;
const IV_INT_MASK: u64 = (1u64 << 29) - 1;
const IV_INT_SIGN_BIT: u64 = 1u64 << 28;
const IV_INT_MAX: i64 = (1i64 << 28) - 1;
const IV_INT_MIN: i64 = -(1i64 << 28);
const IV_IMM_SHIFT: u32 = 27;
const IV_IMM_FALSE: u64 = 0;
const IV_IMM_TRUE: u64 = 1;
const IV_IMM_UNIT: u64 = 2;
const IV_IMM_NONE: u64 = 3;
164
/// A 64-bit NaN-boxed runtime value: either a plain `f64`, or a quiet NaN
/// carrying a 4-bit tag plus 46-bit payload (see the `TAG_*` constants).
#[derive(Clone, Copy)]
pub struct NanValue(u64);

/// String view produced when reading a `TAG_STRING` value: either a borrow
/// of arena storage or a small string unpacked from the inline payload.
#[derive(Clone, Copy, Debug)]
pub enum NanString<'a> {
    /// Borrowed from arena storage.
    Borrowed(&'a str),
    /// Up to `STRING_INLINE_MAX_BYTES` UTF-8 bytes copied out of the payload.
    Inline {
        len: u8,
        bytes: [u8; STRING_INLINE_MAX_BYTES],
    },
}
180
181impl<'a> NanString<'a> {
182 #[inline]
183 pub fn as_str(&self) -> &str {
184 match self {
185 NanString::Borrowed(s) => s,
186 NanString::Inline { len, bytes } => core::str::from_utf8(&bytes[..*len as usize])
187 .expect("NanString inline payload must be valid UTF-8"),
188 }
189 }
190}
191
// `NanString` behaves like a `&str` wherever possible: deref, equality
// (including against `&str` in both directions), ordering, hashing, and
// display all delegate to `as_str()`, so the borrowed and inline
// representations compare and hash identically.

impl Deref for NanString<'_> {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl PartialEq for NanString<'_> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_str() == other.as_str()
    }
}

// Content equality via `str` is total and reflexive, so `Eq` is sound.
impl Eq for NanString<'_> {}

impl PartialEq<&str> for NanString<'_> {
    #[inline]
    fn eq(&self, other: &&str) -> bool {
        self.as_str() == *other
    }
}

impl PartialEq<NanString<'_>> for &str {
    #[inline]
    fn eq(&self, other: &NanString<'_>) -> bool {
        *self == other.as_str()
    }
}

impl PartialOrd for NanString<'_> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for NanString<'_> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_str().cmp(other.as_str())
    }
}

// Hash agrees with `PartialEq` (both go through `as_str`), as `Hash` requires.
impl Hash for NanString<'_> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}

impl core::fmt::Display for NanString<'_> {
    #[inline]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.write_str(self.as_str())
    }
}
251
252impl NanValue {
    /// Sign-extends a 45-bit inline-int payload back to an `i64`.
    #[inline]
    fn decode_inline_int_payload(payload: u64) -> i64 {
        debug_assert!(payload & INT_BIG_BIT == 0);
        let raw = payload & INT_INLINE_MASK;
        if raw & (1u64 << 44) != 0 {
            // Bit 44 is the sign bit: fill all upper bits with ones.
            (raw | !INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Packs `tag` and `payload` into the quiet-NaN box space.
    #[inline]
    pub fn encode(tag: u64, payload: u64) -> Self {
        debug_assert!(tag <= TAG_MASK);
        debug_assert!(payload <= PAYLOAD_MASK);
        NanValue(QNAN | (tag << TAG_SHIFT) | payload)
    }

    /// True when this bit pattern is a boxed value rather than a plain float.
    #[inline]
    pub fn is_nan_boxed(self) -> bool {
        (self.0 & QNAN_MASK) == QNAN
    }

    /// Extracts the 4-bit type tag. Only meaningful when `is_nan_boxed()`.
    #[inline]
    pub fn tag(self) -> u64 {
        (self.0 >> TAG_SHIFT) & TAG_MASK
    }

    /// Extracts the 46-bit payload. Only meaningful when `is_nan_boxed()`.
    #[inline]
    pub fn payload(self) -> u64 {
        self.0 & PAYLOAD_MASK
    }
287
    /// Boxes a float. A genuine `f64` NaN whose bits happen to fall in the
    /// boxed-value space gets its lowest mantissa bit flipped — the result is
    /// still a NaN but can no longer be mistaken for a tagged value.
    #[inline]
    pub fn new_float(f: f64) -> Self {
        let bits = f.to_bits();
        if (bits & QNAN_MASK) == QNAN {
            NanValue(bits ^ 1)
        } else {
            NanValue(bits)
        }
    }

    /// Reinterprets the raw bits as `f64`; callers should check `is_float()`.
    #[inline]
    pub fn as_float(self) -> f64 {
        f64::from_bits(self.0)
    }
304
    /// Boxes an int already known to fit the 45-bit inline range.
    #[inline]
    pub fn new_int_inline(i: i64) -> Self {
        debug_assert!((INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i));
        let payload = (i as u64) & INT_INLINE_MASK;
        Self::encode(TAG_INT, payload)
    }

    /// Boxes a "big" int stored in the arena at `arena_index`.
    #[inline]
    pub fn new_int_arena(arena_index: u32) -> Self {
        Self::encode(TAG_INT, INT_BIG_BIT | (arena_index as u64))
    }

    /// Boxes any `i64`: inline when it fits, otherwise spilled to the arena.
    #[inline]
    pub fn new_int<T: ArenaTypes>(i: i64, arena: &mut Arena<T>) -> Self {
        if (INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i) {
            Self::new_int_inline(i)
        } else {
            let idx = arena.push_i64(i);
            Self::new_int_arena(idx)
        }
    }

    /// Reads an int back, following the arena indirection for big ints.
    #[inline]
    pub fn as_int<T: ArenaTypes>(self, arena: &Arena<T>) -> i64 {
        let p = self.payload();
        if p & INT_BIG_BIT != 0 {
            let idx = (p & !INT_BIG_BIT) as u32;
            arena.get_i64(idx)
        } else {
            Self::decode_inline_int_payload(p)
        }
    }

    /// Raw 45-bit payload of an inline int; `None` for non-ints or big ints.
    #[inline]
    pub fn inline_int_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_INT && self.payload() & INT_BIG_BIT == 0)
            .then_some(self.payload())
    }

    /// Decoded value of an inline int; `None` for non-ints or big ints.
    #[inline]
    pub fn inline_int_value(self) -> Option<i64> {
        self.inline_int_payload()
            .map(Self::decode_inline_int_payload)
    }
349
    // Canonical singleton encodings. The `EMPTY_*` collection values carry a
    // zero payload (no ARENA_REF_BIT), marking them as immediates with no
    // arena slot behind them.
    pub const FALSE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_FALSE);
    pub const TRUE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_TRUE);
    pub const UNIT: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_UNIT);
    pub const NONE: NanValue = NanValue(QNAN | (TAG_NONE << TAG_SHIFT));
    pub const EMPTY_LIST: NanValue = NanValue(QNAN | (TAG_LIST << TAG_SHIFT));
    pub const EMPTY_MAP: NanValue = NanValue(QNAN | (TAG_MAP << TAG_SHIFT));
    pub const EMPTY_VECTOR: NanValue = NanValue(QNAN | (TAG_VECTOR << TAG_SHIFT));
    pub const EMPTY_STRING: NanValue = NanValue(QNAN | (TAG_STRING << TAG_SHIFT));
360
361 #[inline]
362 pub fn new_bool(b: bool) -> Self {
363 if b { Self::TRUE } else { Self::FALSE }
364 }
365
366 #[inline]
367 pub fn as_bool(self) -> bool {
368 self.0 == Self::TRUE.0
369 }
370
371 #[inline]
372 pub fn plain_immediate_payload(self) -> Option<u64> {
373 (self.is_nan_boxed() && self.tag() == TAG_IMMEDIATE && self.payload() <= IMM_UNIT)
374 .then_some(self.payload())
375 }
376
    /// `WRAP_*` code of a wrapper value. Panics on non-wrapper tags.
    #[inline]
    pub fn wrapper_kind(self) -> u64 {
        match self.tag() {
            TAG_SOME => WRAP_SOME,
            TAG_OK => WRAP_OK,
            TAG_ERR => WRAP_ERR,
            _ => panic!("wrapper_kind() called on non-wrapper"),
        }
    }

    /// `WRAPPER_INLINE_*` code when this wrapper stores its inner value
    /// inline (no arena ref); `None` otherwise.
    #[inline]
    fn wrapper_inline_kind(self) -> Option<u64> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT == 0 => {
                Some((self.payload() & WRAPPER_INLINE_KIND_MASK) >> WRAPPER_INLINE_KIND_SHIFT)
            }
            _ => None,
        }
    }

    /// Sign-extends a 43-bit wrapper-inline int payload (bit 42 is the sign).
    #[inline]
    fn decode_wrapper_inline_int_payload(payload: u64) -> i64 {
        let raw = payload & WRAPPER_INT_INLINE_MASK;
        if raw & (1u64 << 42) != 0 {
            (raw | !WRAPPER_INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Truncates an in-range int to the 43-bit wrapper-inline payload.
    #[inline]
    fn encode_wrapper_inline_int(i: i64) -> u64 {
        debug_assert!((WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&i));
        (i as u64) & WRAPPER_INT_INLINE_MASK
    }
415
    /// Reconstructs the inner value of an inline wrapper; `None` when the
    /// wrapper is arena-boxed or this is not a wrapper at all.
    #[inline]
    pub fn wrapper_inline_inner(self) -> Option<NanValue> {
        let kind = self.wrapper_inline_kind()?;
        let payload = self.payload() & WRAPPER_INLINE_PAYLOAD_MASK;
        match kind {
            WRAPPER_INLINE_IMMEDIATE => Some(Self::encode(TAG_IMMEDIATE, payload)),
            WRAPPER_INLINE_INT => Some(Self::new_int_inline(
                Self::decode_wrapper_inline_int_payload(payload),
            )),
            WRAPPER_INLINE_NONE => Some(Self::NONE),
            _ => None,
        }
    }

    /// Builds a wrapper whose inner value is packed into the payload itself.
    #[inline]
    fn new_inline_wrapper(tag: u64, inline_kind: u64, payload: u64) -> Self {
        debug_assert!(matches!(tag, TAG_SOME | TAG_OK | TAG_ERR));
        debug_assert!(inline_kind <= WRAPPER_INLINE_NONE);
        debug_assert!(payload <= WRAPPER_INLINE_PAYLOAD_MASK);
        Self::encode(tag, (inline_kind << WRAPPER_INLINE_KIND_SHIFT) | payload)
    }

    /// Splits a wrapper into `(WRAP_* kind, inner value)`, resolving both the
    /// arena-boxed and inline forms. `None` for non-wrappers.
    #[inline]
    pub fn wrapper_parts<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<(u64, NanValue)> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT != 0 => {
                Some((self.wrapper_kind(), arena.get_boxed(self.arena_index())))
            }
            TAG_SOME | TAG_OK | TAG_ERR => self
                .wrapper_inline_inner()
                .map(|inner| (self.wrapper_kind(), inner)),
            _ => None,
        }
    }
453
454 #[inline]
457 pub fn new_some(inner_index: u32) -> Self {
458 Self::encode(TAG_SOME, ARENA_REF_BIT | (inner_index as u64))
459 }
460
461 #[inline]
462 pub fn new_ok(inner_index: u32) -> Self {
463 Self::encode(TAG_OK, ARENA_REF_BIT | (inner_index as u64))
464 }
465
466 #[inline]
467 pub fn new_err(inner_index: u32) -> Self {
468 Self::encode(TAG_ERR, ARENA_REF_BIT | (inner_index as u64))
469 }
470
471 #[inline]
472 pub fn wrapper_index(self) -> u32 {
473 debug_assert!(
474 self.is_nan_boxed()
475 && matches!(self.tag(), TAG_SOME | TAG_OK | TAG_ERR)
476 && self.payload() & ARENA_REF_BIT != 0
477 );
478 self.arena_index()
479 }
480
481 #[inline]
482 pub fn wrapper_inner<T: ArenaTypes>(self, arena: &Arena<T>) -> NanValue {
483 self.wrapper_parts(arena)
484 .map(|(_, inner)| inner)
485 .expect("wrapper_inner() called on non-wrapper")
486 }
487
488 #[inline]
489 fn wrap_value<T: ArenaTypes>(kind: u64, inner: NanValue, arena: &mut Arena<T>) -> Self {
490 if let Some(payload) = inner.plain_immediate_payload() {
491 let tag = match kind {
492 WRAP_SOME => TAG_SOME,
493 WRAP_OK => TAG_OK,
494 WRAP_ERR => TAG_ERR,
495 _ => unreachable!("invalid wrapper kind"),
496 };
497 Self::new_inline_wrapper(tag, WRAPPER_INLINE_IMMEDIATE, payload)
498 } else if inner.is_none() {
499 let tag = match kind {
500 WRAP_SOME => TAG_SOME,
501 WRAP_OK => TAG_OK,
502 WRAP_ERR => TAG_ERR,
503 _ => unreachable!("invalid wrapper kind"),
504 };
505 Self::new_inline_wrapper(tag, WRAPPER_INLINE_NONE, 0)
506 } else if let Some(value) = inner.inline_int_value() {
507 if (WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&value) {
508 let tag = match kind {
509 WRAP_SOME => TAG_SOME,
510 WRAP_OK => TAG_OK,
511 WRAP_ERR => TAG_ERR,
512 _ => unreachable!("invalid wrapper kind"),
513 };
514 return Self::new_inline_wrapper(
515 tag,
516 WRAPPER_INLINE_INT,
517 Self::encode_wrapper_inline_int(value),
518 );
519 }
520 let idx = arena.push_boxed(inner);
521 match kind {
522 WRAP_SOME => Self::new_some(idx),
523 WRAP_OK => Self::new_ok(idx),
524 WRAP_ERR => Self::new_err(idx),
525 _ => unreachable!("invalid wrapper kind"),
526 }
527 } else {
528 let idx = arena.push_boxed(inner);
529 match kind {
530 WRAP_SOME => Self::new_some(idx),
531 WRAP_OK => Self::new_ok(idx),
532 WRAP_ERR => Self::new_err(idx),
533 _ => unreachable!("invalid wrapper kind"),
534 }
535 }
536 }
537
538 #[inline]
539 pub fn new_some_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
540 Self::wrap_value(WRAP_SOME, inner, arena)
541 }
542
543 #[inline]
544 pub fn new_ok_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
545 Self::wrap_value(WRAP_OK, inner, arena)
546 }
547
548 #[inline]
549 pub fn new_err_value<T: ArenaTypes>(inner: NanValue, arena: &mut Arena<T>) -> Self {
550 Self::wrap_value(WRAP_ERR, inner, arena)
551 }
552
    /// Boxes a string stored in the arena at `arena_index`.
    #[inline]
    pub fn new_string(arena_index: u32) -> Self {
        Self::encode(TAG_STRING, STRING_ARENA_BIT | (arena_index as u64))
    }

    /// Packs up to 5 UTF-8 bytes directly into the payload: the byte length
    /// in bits 40..=42 and the bytes little-endian in bits 0..=39.
    #[inline]
    fn new_small_string_bytes(bytes: &[u8]) -> Self {
        debug_assert!(bytes.len() <= STRING_INLINE_MAX_BYTES);
        let mut payload = (bytes.len() as u64) << STRING_INLINE_LEN_SHIFT;
        for (idx, byte) in bytes.iter().enumerate() {
            payload |= (*byte as u64) << (idx * 8);
        }
        Self::encode(TAG_STRING, payload)
    }

    /// Unpacks an inline small string; `None` for arena strings and
    /// non-strings.
    #[inline]
    pub fn small_string(self) -> Option<NanString<'static>> {
        if !self.is_nan_boxed()
            || self.tag() != TAG_STRING
            || self.payload() & STRING_ARENA_BIT != 0
        {
            return None;
        }
        let payload = self.payload();
        let len = ((payload & STRING_INLINE_LEN_MASK) >> STRING_INLINE_LEN_SHIFT) as u8;
        // The 3-bit length field could encode up to 7; reject out-of-range.
        if len as usize > STRING_INLINE_MAX_BYTES {
            return None;
        }
        let mut bytes = [0u8; STRING_INLINE_MAX_BYTES];
        for (idx, slot) in bytes.iter_mut().take(len as usize).enumerate() {
            *slot = ((payload >> (idx * 8)) & 0xFF) as u8;
        }
        Some(NanString::Inline { len, bytes })
    }

    /// Boxes any `&str`: inline when it fits in 5 bytes, else into the arena.
    #[inline]
    pub fn new_string_value<T: ArenaTypes>(s: &str, arena: &mut Arena<T>) -> Self {
        if s.len() <= STRING_INLINE_MAX_BYTES {
            Self::new_small_string_bytes(s.as_bytes())
        } else {
            Self::new_string(arena.push_string(s))
        }
    }
598
599 #[inline]
600 pub fn new_list(arena_index: u32) -> Self {
601 Self::encode(TAG_LIST, ARENA_REF_BIT | (arena_index as u64))
602 }
603
604 #[inline]
605 pub fn new_tuple(arena_index: u32) -> Self {
606 Self::encode(TAG_TUPLE, ARENA_REF_BIT | (arena_index as u64))
607 }
608
609 #[inline]
610 pub fn new_map(arena_index: u32) -> Self {
611 Self::encode(TAG_MAP, ARENA_REF_BIT | (arena_index as u64))
612 }
613
614 #[inline]
615 pub fn new_vector(arena_index: u32) -> Self {
616 Self::encode(TAG_VECTOR, ARENA_REF_BIT | (arena_index as u64))
617 }
618
619 #[inline]
620 pub fn new_record(arena_index: u32) -> Self {
621 Self::encode(TAG_RECORD, ARENA_REF_BIT | (arena_index as u64))
622 }
623
624 #[inline]
625 pub fn new_variant(arena_index: u32) -> Self {
626 Self::encode(TAG_VARIANT, ARENA_REF_BIT | (arena_index as u64))
627 }
628
    /// Packs a symbol: 2-bit `SYMBOL_*` kind in the low bits, index above.
    #[inline]
    fn new_symbol(symbol_kind: u64, symbol_index: u32) -> Self {
        Self::encode(TAG_SYMBOL, symbol_kind | ((symbol_index as u64) << 2))
    }

    /// `SYMBOL_*` kind of a symbol value.
    #[inline]
    pub fn symbol_kind(self) -> u64 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        self.payload() & SYMBOL_KIND_MASK
    }

    /// Symbol-table index of a symbol value.
    #[inline]
    pub fn symbol_index(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        (self.payload() >> 2) as u32
    }

    /// Boxes a field-less variant as a symbol referencing its constructor.
    #[inline]
    pub fn new_nullary_variant(symbol_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NULLARY_VARIANT, symbol_index)
    }
650
    /// Tries to pack a single-field variant entirely into the payload:
    /// 16-bit ctor id plus either a 29-bit int or an immediate/`None` code.
    /// Returns `None` when the ctor id or inner value doesn't fit.
    #[inline]
    pub fn try_new_inline_variant(ctor_id: u32, inner: NanValue) -> Option<Self> {
        if ctor_id > IV_CTOR_MASK as u32 {
            return None;
        }
        let ctor_bits = (ctor_id as u64) << IV_CTOR_SHIFT;

        if inner.is_nan_boxed() {
            match inner.tag() {
                TAG_INT if inner.payload() & INT_BIG_BIT == 0 => {
                    let i = Self::decode_inline_int_payload(inner.payload());
                    if (IV_INT_MIN..=IV_INT_MAX).contains(&i) {
                        let int_bits = (i as u64) & IV_INT_MASK;
                        return Some(Self::encode(TAG_INLINE_VARIANT, ctor_bits | int_bits));
                    }
                }
                TAG_IMMEDIATE => {
                    let imm = match inner.payload() {
                        IMM_FALSE => IV_IMM_FALSE,
                        IMM_TRUE => IV_IMM_TRUE,
                        IMM_UNIT => IV_IMM_UNIT,
                        _ => return None,
                    };
                    // IV_KIND_BIT distinguishes immediate codes from ints.
                    return Some(Self::encode(
                        TAG_INLINE_VARIANT,
                        ctor_bits | IV_KIND_BIT | (imm << IV_IMM_SHIFT),
                    ));
                }
                TAG_NONE => {
                    return Some(Self::encode(
                        TAG_INLINE_VARIANT,
                        ctor_bits | IV_KIND_BIT | (IV_IMM_NONE << IV_IMM_SHIFT),
                    ));
                }
                _ => {}
            }
        }
        None
    }

    /// Constructor id packed into an inline variant.
    #[inline]
    pub fn inline_variant_ctor_id(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT);
        ((self.payload() >> IV_CTOR_SHIFT) & IV_CTOR_MASK) as u32
    }

    /// Reconstructs the single field packed into an inline variant.
    #[inline]
    pub fn inline_variant_inner(self) -> NanValue {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT);
        let payload = self.payload();
        if payload & IV_KIND_BIT == 0 {
            // Int form: sign-extend from bit 28.
            let raw = payload & IV_INT_MASK;
            let i = if raw & IV_INT_SIGN_BIT != 0 {
                (raw | !IV_INT_MASK) as i64
            } else {
                raw as i64
            };
            Self::new_int_inline(i)
        } else {
            // Immediate form: 2-bit IV_IMM_* code.
            let imm = (payload >> IV_IMM_SHIFT) & 0b11;
            match imm {
                IV_IMM_FALSE => Self::FALSE,
                IV_IMM_TRUE => Self::TRUE,
                IV_IMM_UNIT => Self::UNIT,
                IV_IMM_NONE => Self::NONE,
                _ => unreachable!(),
            }
        }
    }
720
    /// Boxes a user function symbol.
    #[inline]
    pub fn new_fn(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_FN, arena_index)
    }

    /// Boxes a builtin symbol.
    #[inline]
    pub fn new_builtin(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_BUILTIN, arena_index)
    }

    /// Boxes a namespace symbol.
    #[inline]
    pub fn new_namespace(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NAMESPACE, arena_index)
    }

    /// Arena slot encoded in the payload (ARENA_REF_BIT stripped). Only
    /// meaningful for arena-backed values.
    #[inline]
    pub fn arena_index(self) -> u32 {
        (self.payload() & !ARENA_REF_BIT) as u32
    }
740
    /// Arena slot this value references, or `None` for values that live
    /// entirely in the payload (floats, immediates, symbols, inline
    /// ints/strings/wrappers, and the empty-collection immediates).
    #[inline]
    pub fn heap_index(self) -> Option<u32> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_INT => {
                let p = self.payload();
                if p & INT_BIG_BIT != 0 {
                    Some((p & !INT_BIG_BIT) as u32)
                } else {
                    None
                }
            }
            TAG_STRING | TAG_SOME | TAG_OK | TAG_ERR | TAG_LIST | TAG_TUPLE | TAG_MAP
            | TAG_RECORD | TAG_VARIANT | TAG_VECTOR => {
                (self.payload() & ARENA_REF_BIT != 0).then_some(self.arena_index())
            }
            _ => None,
        }
    }

    /// Rebuilds this value pointing at a new arena slot (entry relocation).
    /// Non-heap values pass through unchanged. NOTE(review): for wrapper and
    /// string tags this unconditionally re-encodes the arena-ref form, so it
    /// must only be called on values for which `heap_index()` returned
    /// `Some` — confirm callers uphold that invariant.
    #[inline]
    pub fn with_heap_index(self, index: u32) -> Self {
        if !self.is_nan_boxed() {
            return self;
        }
        match self.tag() {
            TAG_INT => {
                debug_assert!(self.payload() & INT_BIG_BIT != 0);
                Self::new_int_arena(index)
            }
            TAG_SOME => Self::new_some(index),
            TAG_OK => Self::new_ok(index),
            TAG_ERR => Self::new_err(index),
            TAG_STRING => Self::new_string(index),
            TAG_LIST => Self::new_list(index),
            TAG_TUPLE => Self::new_tuple(index),
            TAG_MAP => Self::new_map(index),
            TAG_VECTOR => Self::new_vector(index),
            TAG_RECORD => Self::new_record(index),
            TAG_VARIANT => Self::new_variant(index),
            _ => self,
        }
    }
786
787 #[inline]
790 pub fn is_float(self) -> bool {
791 !self.is_nan_boxed()
792 }
793
794 #[inline]
795 pub fn is_int(self) -> bool {
796 self.is_nan_boxed() && self.tag() == TAG_INT
797 }
798
799 #[inline]
800 pub fn is_bool(self) -> bool {
801 self.is_nan_boxed()
802 && self.tag() == TAG_IMMEDIATE
803 && (self.payload() == IMM_TRUE || self.payload() == IMM_FALSE)
804 }
805
806 #[inline]
807 pub fn is_unit(self) -> bool {
808 self.0 == Self::UNIT.0
809 }
810
811 #[inline]
812 pub fn is_none(self) -> bool {
813 self.0 == Self::NONE.0
814 }
815
816 #[inline]
817 pub fn is_some(self) -> bool {
818 self.is_nan_boxed() && self.tag() == TAG_SOME
819 }
820
821 #[inline]
822 pub fn is_ok(self) -> bool {
823 self.is_nan_boxed() && self.tag() == TAG_OK
824 }
825
826 #[inline]
827 pub fn is_err(self) -> bool {
828 self.is_nan_boxed() && self.tag() == TAG_ERR
829 }
830
831 #[inline]
832 pub fn is_string(self) -> bool {
833 self.is_nan_boxed() && self.tag() == TAG_STRING
834 }
835
836 pub fn string_eq<T: ArenaTypes>(self, other: NanValue, arena: &Arena<T>) -> bool {
837 if self.bits() == other.bits() {
838 return true;
839 }
840 if !self.is_string() || !other.is_string() {
841 return false;
842 }
843 arena.get_string_value(self).as_str() == arena.get_string_value(other).as_str()
844 }
845
846 #[inline]
847 pub fn is_list(self) -> bool {
848 self.is_nan_boxed() && self.tag() == TAG_LIST
849 }
850
851 #[inline]
852 pub fn is_record(self) -> bool {
853 self.is_nan_boxed() && self.tag() == TAG_RECORD
854 }
855
856 #[inline]
857 pub fn is_fn(self) -> bool {
858 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_FN
859 }
860
861 #[inline]
862 pub fn is_variant(self) -> bool {
863 self.is_nan_boxed()
864 && (self.tag() == TAG_VARIANT
865 || self.tag() == TAG_INLINE_VARIANT
866 || (self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NULLARY_VARIANT))
867 }
868
869 #[inline]
870 pub fn is_inline_variant(self) -> bool {
871 self.is_nan_boxed() && self.tag() == TAG_INLINE_VARIANT
872 }
873
874 #[inline]
875 pub fn is_map(self) -> bool {
876 self.is_nan_boxed() && self.tag() == TAG_MAP
877 }
878
879 #[inline]
880 pub fn is_vector(self) -> bool {
881 self.is_nan_boxed() && self.tag() == TAG_VECTOR
882 }
883
884 #[inline]
885 pub fn is_empty_vector_immediate(self) -> bool {
886 self.is_nan_boxed() && self.tag() == TAG_VECTOR && self.payload() & ARENA_REF_BIT == 0
887 }
888
889 #[inline]
890 pub fn is_tuple(self) -> bool {
891 self.is_nan_boxed() && self.tag() == TAG_TUPLE
892 }
893
894 #[inline]
895 pub fn is_builtin(self) -> bool {
896 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_BUILTIN
897 }
898
899 #[inline]
900 pub fn is_namespace(self) -> bool {
901 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NAMESPACE
902 }
903
904 #[inline]
905 pub fn is_empty_list_immediate(self) -> bool {
906 self.is_nan_boxed() && self.tag() == TAG_LIST && self.payload() & ARENA_REF_BIT == 0
907 }
908
909 #[inline]
910 pub fn is_empty_map_immediate(self) -> bool {
911 self.is_nan_boxed() && self.tag() == TAG_MAP && self.payload() & ARENA_REF_BIT == 0
912 }
913
914 pub fn type_name(self) -> &'static str {
915 if self.is_float() {
916 return "Float";
917 }
918 match self.tag() {
919 TAG_INT => "Int",
920 TAG_IMMEDIATE => match self.payload() {
921 IMM_FALSE | IMM_TRUE => "Bool",
922 IMM_UNIT => "Unit",
923 _ => "Unknown",
924 },
925 TAG_SOME => "Option.Some",
926 TAG_NONE => "Option.None",
927 TAG_OK => "Result.Ok",
928 TAG_ERR => "Result.Err",
929 TAG_STRING => "String",
930 TAG_LIST => "List",
931 TAG_TUPLE => "Tuple",
932 TAG_MAP => "Map",
933 TAG_VECTOR => "Vector",
934 TAG_RECORD => "Record",
935 TAG_VARIANT | TAG_INLINE_VARIANT => "Variant",
936 TAG_SYMBOL => match self.symbol_kind() {
937 SYMBOL_FN => "Fn",
938 SYMBOL_BUILTIN => "Builtin",
939 SYMBOL_NAMESPACE => "Namespace",
940 SYMBOL_NULLARY_VARIANT => "Variant",
941 _ => "Unknown",
942 },
943 _ => "Unknown",
944 }
945 }
946
    /// Constructor id for any of the three variant encodings; `None` for
    /// non-variants.
    #[inline]
    pub fn variant_ctor_id<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<u32> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_INLINE_VARIANT => Some(self.inline_variant_ctor_id()),
            TAG_VARIANT => {
                let (type_id, variant_id, _) = arena.get_variant(self.arena_index());
                arena.find_ctor_id(type_id, variant_id)
            }
            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
                Some(arena.get_nullary_variant_ctor(self.symbol_index()))
            }
            _ => None,
        }
    }

    /// `(type_id, variant_id, fields)` for arena-backed and nullary variants.
    /// Inline variants are NOT handled here (their field is packed into the
    /// payload) — use `inline_variant_info` for those.
    #[inline]
    pub fn variant_parts<T: ArenaTypes>(self, arena: &Arena<T>) -> Option<(u32, u16, &[NanValue])> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_VARIANT => {
                let (type_id, variant_id, fields) = arena.get_variant(self.arena_index());
                Some((type_id, variant_id, fields))
            }
            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
                let (type_id, variant_id) =
                    arena.get_ctor_parts(arena.get_nullary_variant_ctor(self.symbol_index()));
                Some((type_id, variant_id, &[]))
            }
            _ => None,
        }
    }

    /// Sole field of a single-field variant, whether inline or arena-backed.
    #[inline]
    pub fn variant_single_field<T: ArenaTypes>(self, arena: &Arena<T>) -> NanValue {
        if self.tag() == TAG_INLINE_VARIANT {
            self.inline_variant_inner()
        } else {
            let (_, _, fields) = arena.get_variant(self.arena_index());
            debug_assert_eq!(fields.len(), 1);
            fields[0]
        }
    }

    /// `(type_id, variant_id, inner)` for inline variants only.
    #[inline]
    pub fn inline_variant_info<T: ArenaTypes>(
        self,
        arena: &Arena<T>,
    ) -> Option<(u32, u16, NanValue)> {
        if !self.is_nan_boxed() || self.tag() != TAG_INLINE_VARIANT {
            return None;
        }
        let ctor_id = self.inline_variant_ctor_id();
        let (type_id, variant_id) = arena.get_ctor_parts(ctor_id);
        Some((type_id, variant_id, self.inline_variant_inner()))
    }
1007
    /// Raw bit pattern of this value.
    #[inline]
    pub fn bits(self) -> u64 {
        self.0
    }

    /// Rebuilds a value from raw bits (inverse of `bits()`).
    #[inline]
    pub fn from_bits(bits: u64) -> Self {
        NanValue(bits)
    }

    /// Hash used as the map key: strings hash by content, everything else by
    /// identity (its raw bit pattern).
    pub fn map_key_hash<T: ArenaTypes>(self, arena: &Arena<T>) -> u64 {
        if self.is_string() {
            use core::hash::{Hash, Hasher};
            let mut hasher = DefaultHasher::new();
            // 3 == TAG_STRING — presumably a domain separator so content
            // hashes can't collide with raw-bit keys; confirm with callers.
            3u8.hash(&mut hasher);
            arena.get_string_value(self).hash(&mut hasher);
            hasher.finish()
        } else {
            self.bits()
        }
    }

    /// Structural hash that follows heap references via `hash_in` (defined
    /// in a sibling module).
    pub fn map_key_hash_deep<T: ArenaTypes>(self, arena: &Arena<T>) -> u64 {
        use core::hash::Hasher;
        let mut hasher = DefaultHasher::new();
        self.hash_in(&mut hasher, arena);
        hasher.finish()
    }
1040}
1041
/// Shallow debug rendering: immediates and inline payloads are decoded in
/// place, while arena-backed values print their slot as `arena:N` (no arena
/// is available here to follow the reference).
impl core::fmt::Debug for NanValue {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        if self.is_float() {
            return write!(f, "Float({})", self.as_float());
        }
        match self.tag() {
            TAG_INT => {
                if self.payload() & INT_BIG_BIT != 0 {
                    write!(f, "Int(arena:{})", (self.payload() & !INT_BIG_BIT) as u32)
                } else {
                    write!(
                        f,
                        "Int({})",
                        Self::decode_inline_int_payload(self.payload())
                    )
                }
            }
            TAG_IMMEDIATE => match self.payload() {
                IMM_FALSE => write!(f, "False"),
                IMM_TRUE => write!(f, "True"),
                IMM_UNIT => write!(f, "Unit"),
                _ => write!(f, "Immediate({})", self.payload()),
            },
            TAG_NONE => write!(f, "None"),
            TAG_SOME | TAG_OK | TAG_ERR => {
                let kind = match self.tag() {
                    TAG_SOME => "Some",
                    TAG_OK => "Ok",
                    TAG_ERR => "Err",
                    _ => "?",
                };
                // Inline wrappers can be decoded without the arena.
                if self.payload() & ARENA_REF_BIT != 0 {
                    write!(f, "{}(arena:{})", kind, self.arena_index())
                } else if let Some(inner) = self.wrapper_inline_inner() {
                    write!(f, "{}({:?})", kind, inner)
                } else {
                    write!(f, "{}(?)", kind)
                }
            }
            TAG_SYMBOL => match self.symbol_kind() {
                SYMBOL_FN => write!(f, "Fn(symbol:{})", self.symbol_index()),
                SYMBOL_BUILTIN => write!(f, "Builtin(symbol:{})", self.symbol_index()),
                SYMBOL_NAMESPACE => write!(f, "Namespace(symbol:{})", self.symbol_index()),
                SYMBOL_NULLARY_VARIANT => {
                    write!(f, "NullaryVariant(symbol:{})", self.symbol_index())
                }
                _ => write!(f, "Symbol({})", self.payload()),
            },
            TAG_STRING => {
                if let Some(s) = self.small_string() {
                    write!(f, "String({:?})", s.as_str())
                } else {
                    write!(f, "String(arena:{})", self.arena_index())
                }
            }
            TAG_INLINE_VARIANT => {
                let ctor = self.inline_variant_ctor_id();
                let inner = self.inline_variant_inner();
                write!(f, "InlineVariant(ctor:{}, {:?})", ctor, inner)
            }
            TAG_LIST if self.is_empty_list_immediate() => write!(f, "EmptyList"),
            TAG_MAP if self.is_empty_map_immediate() => write!(f, "EmptyMap"),
            TAG_VECTOR if self.is_empty_vector_immediate() => write!(f, "EmptyVector"),
            _ => write!(f, "{}(arena:{})", self.type_name(), self.arena_index()),
        }
    }
}
1111
/// Hasher used for map keys: `std`'s `DefaultHasher` when the `std` feature
/// is on, otherwise a small FNV-1a (64-bit) fallback.
struct DefaultHasher {
    #[cfg(feature = "std")]
    inner: std::collections::hash_map::DefaultHasher,
    #[cfg(not(feature = "std"))]
    state: u64,
}

impl DefaultHasher {
    fn new() -> Self {
        #[cfg(feature = "std")]
        {
            let inner = std::collections::hash_map::DefaultHasher::new();
            Self { inner }
        }
        #[cfg(not(feature = "std"))]
        {
            // FNV-1a 64-bit offset basis.
            Self {
                state: 0xcbf29ce484222325,
            }
        }
    }
}

impl Hasher for DefaultHasher {
    #[cfg(feature = "std")]
    fn finish(&self) -> u64 {
        self.inner.finish()
    }
    #[cfg(feature = "std")]
    fn write(&mut self, bytes: &[u8]) {
        self.inner.write(bytes);
    }

    #[cfg(not(feature = "std"))]
    fn finish(&self) -> u64 {
        self.state
    }
    #[cfg(not(feature = "std"))]
    fn write(&mut self, bytes: &[u8]) {
        // FNV-1a: xor each byte in, then multiply by the 64-bit FNV prime.
        self.state = bytes
            .iter()
            .fold(self.state, |acc, &b| {
                (acc ^ u64::from(b)).wrapping_mul(0x100000001b3)
            });
    }
}
1164
/// The value heap. Entries live in one of four spaces (young / yard /
/// handoff / stable — see `HeapSpace`); the `scratch_*` vectors and
/// `peak_usage` are bookkeeping presumably used by collection in the
/// `memory` module — confirm there. Also owns the type/ctor/symbol
/// metadata tables.
#[derive(Debug, Clone)]
pub struct Arena<T: ArenaTypes> {
    // One backing vector per heap space.
    young_entries: Vec<ArenaEntry<T>>,
    yard_entries: Vec<ArenaEntry<T>>,
    handoff_entries: Vec<ArenaEntry<T>>,
    stable_entries: Vec<ArenaEntry<T>>,
    // Per-space scratch index buffers (their use is defined in sibling modules).
    scratch_young: Vec<u32>,
    scratch_yard: Vec<u32>,
    scratch_handoff: Vec<u32>,
    scratch_stable: Vec<u32>,
    // High-water mark of per-space entry counts.
    peak_usage: ArenaUsage,
    // Space new allocations currently target.
    alloc_space: AllocSpace,
    // Type metadata, indexed by type id.
    pub type_names: Vec<String>,
    pub type_field_names: Vec<Vec<String>>,
    pub type_variant_names: Vec<Vec<String>>,
    pub type_variant_ctor_ids: Vec<Vec<u32>>,
    // ctor id -> (type id, variant id).
    pub ctor_to_type_variant: Vec<(u32, u16)>,
    // Symbol table addressed by `NanValue::symbol_index()`.
    pub symbol_entries: Vec<ArenaSymbol<T>>,
    // (alias name, target type id).
    pub type_aliases: Vec<(String, u32)>,
}
1190
/// A single heap cell; `NanValue` arena indices point at these.
/// Note `Rc` is an alias for `Arc` (see the imports at the top of the file).
#[derive(Debug, Clone)]
pub enum ArenaEntry<T: ArenaTypes> {
    /// Spilled int that didn't fit the 45-bit inline range.
    Int(i64),
    String(Rc<str>),
    List(ArenaList),
    Tuple(Vec<NanValue>),
    Map(T::Map),
    Vector(Vec<NanValue>),
    Record {
        type_id: u32,
        fields: Vec<NanValue>,
    },
    Variant {
        type_id: u32,
        variant_id: u16,
        fields: Vec<NanValue>,
    },
    Fn(Rc<T::Fn>),
    /// Builtin referenced by name.
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Inner value of an arena-boxed `Some`/`Ok`/`Err` wrapper.
    Boxed(NanValue),
}
1216
/// Entries of the symbol table (`Arena::symbol_entries`), addressed by
/// `NanValue::symbol_index()`.
#[derive(Debug, Clone)]
pub enum ArenaSymbol<T: ArenaTypes> {
    Fn(Rc<T::Fn>),
    /// Builtin referenced by name.
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Field-less variant; only its constructor id needs storing.
    NullaryVariant {
        ctor_id: u32,
    },
}
1229
/// Heap representation of a list. The non-`Flat` forms are rope-like nodes
/// built by prepend/concat operations (see the `lists` module for their
/// construction and flattening).
#[derive(Debug, Clone)]
pub enum ArenaList {
    /// Contiguous storage; the live view starts at `start`.
    Flat {
        items: Rc<Vec<NanValue>>,
        start: usize,
    },
    /// `head` consed onto `tail`; `len` caches the total length.
    Prepend {
        head: NanValue,
        tail: NanValue,
        len: usize,
    },
    /// Concatenation of `left` and `right`; `len` caches the total length.
    Concat {
        left: NanValue,
        right: NanValue,
        len: usize,
    },
    /// A current segment plus further segments held in `rest`.
    Segments {
        current: NanValue,
        rest: Rc<Vec<NanValue>>,
        start: usize,
        len: usize,
    },
}
1253
// Threshold used by list building — NOTE(review): exact semantics are
// defined in the `lists` module.
const LIST_APPEND_CHUNK_LIMIT: usize = 128;

/// Which of the four arena spaces an entry lives in; the discriminants are
/// packed into the top bits of heap handles (see `HEAP_SPACE_SHIFT`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum HeapSpace {
    Young = 0,
    Yard = 1,
    Handoff = 2,
    Stable = 3,
}

/// Spaces a new allocation may target (there is deliberately no `Stable`
/// variant here — allocations never go directly to the stable space).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocSpace {
    Young,
    Yard,
    Handoff,
}
1270
/// Per-space entry counts, used to report arena occupancy.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct ArenaUsage {
    pub young: usize,
    pub yard: usize,
    pub handoff: usize,
    pub stable: usize,
}

impl ArenaUsage {
    /// Total number of entries across all four spaces.
    pub fn total(self) -> usize {
        let ArenaUsage {
            young,
            yard,
            handoff,
            stable,
        } = self;
        young + yard + handoff + stable
    }
}
1284
// Packed heap handle: the top two bits of the `u32` select the space
// (`HeapSpace`), the low 30 bits are the index within that space.
pub(crate) const HEAP_SPACE_SHIFT: u32 = 30;
pub(crate) const HEAP_SPACE_MASK_U32: u32 = 0b11 << HEAP_SPACE_SHIFT;
pub(crate) const HEAP_INDEX_MASK_U32: u32 = (1 << HEAP_SPACE_SHIFT) - 1;
1288
1289mod arena;
1290mod compare;
1291mod lists;
1292mod memory;
1293
/// With the `runtime` feature enabled, persistent maps are backed by
/// `aver_rt::AverMap`.
#[cfg(feature = "runtime")]
pub type PersistentMap = aver_rt::AverMap<u64, (NanValue, NanValue)>;

/// Straight delegation of the `MapLike` surface to `AverMap`'s inherent
/// methods of the same names.
#[cfg(feature = "runtime")]
impl MapLike for aver_rt::AverMap<u64, (NanValue, NanValue)> {
    fn new() -> Self {
        aver_rt::AverMap::new()
    }

    fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)> {
        aver_rt::AverMap::get(self, key)
    }

    fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self {
        aver_rt::AverMap::insert(self, key, value)
    }

    // Overrides the trait default to use the map's own owned-insert —
    // presumably cheaper than the persistent path; see `aver_rt`.
    fn insert_owned(self, key: u64, value: (NanValue, NanValue)) -> Self {
        aver_rt::AverMap::insert_owned(self, key, value)
    }

    fn rewrite_values_mut(&mut self, f: impl FnMut(&mut (NanValue, NanValue))) {
        self.rewrite_values_in_place(f)
    }

    fn len(&self) -> usize {
        aver_rt::AverMap::len(self)
    }

    fn is_empty(&self) -> bool {
        aver_rt::AverMap::is_empty(self)
    }

    fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))> {
        aver_rt::AverMap::iter(self)
    }

    fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)> {
        aver_rt::AverMap::values(self)
    }
}
1340
/// Fallback persistent map used when the `runtime` feature (and its
/// `aver_rt::AverMap`) is unavailable: a plain `BTreeMap` that is cloned on
/// every persistent insert.
#[cfg(not(feature = "runtime"))]
#[derive(Clone, Debug)]
pub struct PersistentMap(alloc::collections::BTreeMap<u64, (NanValue, NanValue)>);
1349
1350#[cfg(not(feature = "runtime"))]
1351impl MapLike for PersistentMap {
1352 fn new() -> Self {
1353 PersistentMap(alloc::collections::BTreeMap::new())
1354 }
1355
1356 fn get(&self, key: &u64) -> Option<&(NanValue, NanValue)> {
1357 self.0.get(key)
1358 }
1359
1360 fn insert(&self, key: u64, value: (NanValue, NanValue)) -> Self {
1361 let mut m = self.0.clone();
1362 m.insert(key, value);
1363 PersistentMap(m)
1364 }
1365
1366 fn rewrite_values_mut(&mut self, mut f: impl FnMut(&mut (NanValue, NanValue))) {
1367 for value in self.0.values_mut() {
1368 f(value);
1369 }
1370 }
1371
1372 fn len(&self) -> usize {
1373 self.0.len()
1374 }
1375
1376 fn is_empty(&self) -> bool {
1377 self.0.is_empty()
1378 }
1379
1380 fn iter(&self) -> impl Iterator<Item = (&u64, &(NanValue, NanValue))> {
1381 self.0.iter()
1382 }
1383
1384 fn values(&self) -> impl Iterator<Item = &(NanValue, NanValue)> {
1385 self.0.values()
1386 }
1387}