1use std::cmp::Ordering;
37use std::hash::{Hash, Hasher};
38use std::ops::Deref;
39use std::rc::Rc;
40
41use crate::value::FunctionValue;
42
/// Backing persistent-map type for `Map` values: a 64-bit key hash mapped to
/// the original (key, value) pair (see `NanValue::map_key_hash` for how the
/// hash is derived).
pub type PersistentMap = aver_rt::AverMap<u64, (NanValue, NanValue)>;

// ---------------------------------------------------------------------------
// NaN-boxing layout.
//
// A `NanValue` is a raw 64-bit float pattern. Patterns whose bits 63-50 match
// QNAN (sign clear, exponent all ones, mantissa bits 51-50 set) are "boxed"
// non-float values laid out as:
//
//   bits 63-50  QNAN signature
//   bits 49-46  4-bit type tag (TAG_*)
//   bits 45-0   46-bit payload, interpreted per tag
// ---------------------------------------------------------------------------

/// Quiet-NaN signature claimed by boxed values.
const QNAN: u64 = 0x7FFC_0000_0000_0000;
/// Covers sign, exponent, and mantissa bits 51-50 (i.e. bits 63-50).
const QNAN_MASK: u64 = 0xFFFC_0000_0000_0000;
/// The type tag sits at bits 49-46.
const TAG_SHIFT: u32 = 46;
const TAG_MASK: u64 = 0xF;
/// Low 46 bits: tag-specific payload.
const PAYLOAD_MASK: u64 = (1u64 << 46) - 1;

// Type tags (bits 49-46).
const TAG_IMMEDIATE: u64 = 0;
const TAG_SYMBOL: u64 = 1;
const TAG_INT: u64 = 2;
const TAG_STRING: u64 = 3;
const TAG_SOME: u64 = 4;
const TAG_NONE: u64 = 5;
const TAG_OK: u64 = 6;
const TAG_ERR: u64 = 7;
const TAG_LIST: u64 = 8;
const TAG_VECTOR: u64 = 9; const TAG_MAP: u64 = 10;
const TAG_RECORD: u64 = 11;
const TAG_VARIANT: u64 = 12;
const TAG_TUPLE: u64 = 13;

// TAG_SYMBOL payload: bits 1-0 hold one of the kinds below, the remaining
// bits hold the symbol index (see `new_symbol` / `symbol_index`).
const SYMBOL_FN: u64 = 0;
const SYMBOL_BUILTIN: u64 = 1;
const SYMBOL_NAMESPACE: u64 = 2;
const SYMBOL_NULLARY_VARIANT: u64 = 3;
const SYMBOL_KIND_MASK: u64 = 0b11;

// TAG_IMMEDIATE payloads.
const IMM_FALSE: u64 = 0;
const IMM_TRUE: u64 = 1;
const IMM_UNIT: u64 = 2;

// Wrapper (Some/Ok/Err) kind codes, plus the inline-wrapper payload layout:
// when bit 45 (ARENA_REF_BIT) is clear, bits 44-43 carry the inline kind and
// bits 42-0 carry the inline data.
const WRAP_SOME: u64 = 0;
const WRAP_OK: u64 = 1;
const WRAP_ERR: u64 = 2;
const WRAPPER_INLINE_KIND_SHIFT: u32 = 43;
const WRAPPER_INLINE_KIND_MASK: u64 = 0b11 << WRAPPER_INLINE_KIND_SHIFT;
const WRAPPER_INLINE_PAYLOAD_MASK: u64 = (1u64 << WRAPPER_INLINE_KIND_SHIFT) - 1;
const WRAPPER_INLINE_IMMEDIATE: u64 = 0;
const WRAPPER_INLINE_INT: u64 = 1;
const WRAPPER_INLINE_NONE: u64 = 2;
// Inline wrapper ints are 43-bit two's complement (sign bit at bit 42).
const WRAPPER_INT_INLINE_MASK: u64 = WRAPPER_INLINE_PAYLOAD_MASK;
const WRAPPER_INT_INLINE_MAX: i64 = (1i64 << 42) - 1;
const WRAPPER_INT_INLINE_MIN: i64 = -(1i64 << 42);

/// Payload bit 45: set when the payload's low bits are an arena index rather
/// than inline data.
const ARENA_REF_BIT: u64 = 1u64 << 45;
const INT_BIG_BIT: u64 = ARENA_REF_BIT;
// Inline ints are 45-bit two's complement (sign bit at bit 44).
const INT_INLINE_MASK: u64 = (1u64 << 45) - 1;
const INT_INLINE_MAX: i64 = (1i64 << 44) - 1;
const INT_INLINE_MIN: i64 = -(1i64 << 44);

// Inline string payload: length (0-5) in bits 42-40, UTF-8 bytes in bits
// 39-0 (byte i occupies bits 8i..8i+8).
const STRING_ARENA_BIT: u64 = ARENA_REF_BIT;
const STRING_INLINE_LEN_SHIFT: u32 = 40;
const STRING_INLINE_LEN_MASK: u64 = 0b111 << STRING_INLINE_LEN_SHIFT;
const STRING_INLINE_MAX_BYTES: usize = 5;
104
/// A NaN-boxed runtime value in a single `u64`: any bit pattern outside the
/// reserved quiet-NaN range is a plain `f64`; patterns inside it carry a
/// 4-bit type tag (bits 49-46) and a 46-bit payload. Heap-backed payloads
/// index into an `Arena`; the value itself is `Copy`.
#[derive(Clone, Copy)]
pub struct NanValue(u64);
111
/// A string view: either borrowed from arena-owned storage, or reconstructed
/// from the up-to-5 bytes packed directly into a `NanValue` payload.
#[derive(Clone, Copy, Debug)]
pub enum NanString<'a> {
    /// Slice borrowed from the arena.
    Borrowed(&'a str),
    /// Bytes copied out of an inline string payload; only `bytes[..len]`
    /// is meaningful.
    Inline {
        len: u8,
        bytes: [u8; STRING_INLINE_MAX_BYTES],
    },
}
120
121impl<'a> NanString<'a> {
122 #[inline]
123 pub fn as_str(&self) -> &str {
124 match self {
125 NanString::Borrowed(s) => s,
126 NanString::Inline { len, bytes } => std::str::from_utf8(&bytes[..*len as usize])
127 .expect("NanString inline payload must be valid UTF-8"),
128 }
129 }
130}
131
132impl Deref for NanString<'_> {
133 type Target = str;
134
135 #[inline]
136 fn deref(&self) -> &Self::Target {
137 self.as_str()
138 }
139}
140
141impl PartialEq for NanString<'_> {
142 #[inline]
143 fn eq(&self, other: &Self) -> bool {
144 self.as_str() == other.as_str()
145 }
146}
147
148impl Eq for NanString<'_> {}
149
150impl PartialEq<&str> for NanString<'_> {
151 #[inline]
152 fn eq(&self, other: &&str) -> bool {
153 self.as_str() == *other
154 }
155}
156
157impl PartialEq<NanString<'_>> for &str {
158 #[inline]
159 fn eq(&self, other: &NanString<'_>) -> bool {
160 *self == other.as_str()
161 }
162}
163
164impl PartialOrd for NanString<'_> {
165 #[inline]
166 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
167 Some(self.cmp(other))
168 }
169}
170
171impl Ord for NanString<'_> {
172 #[inline]
173 fn cmp(&self, other: &Self) -> Ordering {
174 self.as_str().cmp(other.as_str())
175 }
176}
177
178impl Hash for NanString<'_> {
179 #[inline]
180 fn hash<H: Hasher>(&self, state: &mut H) {
181 self.as_str().hash(state);
182 }
183}
184
185impl std::fmt::Display for NanString<'_> {
186 #[inline]
187 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
188 f.write_str(self.as_str())
189 }
190}
191
192impl NanValue {
    /// Sign-extend a 45-bit inline int payload (sign at bit 44) back to i64.
    /// Must not be called with the INT_BIG_BIT (arena) bit set.
    #[inline]
    fn decode_inline_int_payload(payload: u64) -> i64 {
        debug_assert!(payload & INT_BIG_BIT == 0);
        let raw = payload & INT_INLINE_MASK;
        if raw & (1u64 << 44) != 0 {
            // Negative: fill bits 63-45 with ones before reinterpreting.
            (raw | !INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Assemble a boxed value from a 4-bit tag and a 46-bit payload.
    #[inline]
    fn encode(tag: u64, payload: u64) -> Self {
        debug_assert!(tag <= TAG_MASK);
        debug_assert!(payload <= PAYLOAD_MASK);
        NanValue(QNAN | (tag << TAG_SHIFT) | payload)
    }

    /// True when the bit pattern is a boxed value rather than a plain float.
    #[inline]
    fn is_nan_boxed(self) -> bool {
        (self.0 & QNAN_MASK) == QNAN
    }

    /// Type tag (bits 49-46); only meaningful when `is_nan_boxed()`.
    #[inline]
    fn tag(self) -> u64 {
        (self.0 >> TAG_SHIFT) & TAG_MASK
    }

    /// Tag-specific payload (low 46 bits); only meaningful when boxed.
    #[inline]
    fn payload(self) -> u64 {
        self.0 & PAYLOAD_MASK
    }
227
228 #[inline]
231 pub fn new_float(f: f64) -> Self {
232 let bits = f.to_bits();
233 if (bits & QNAN_MASK) == QNAN {
234 NanValue(bits ^ 1)
235 } else {
236 NanValue(bits)
237 }
238 }
239
240 #[inline]
241 pub fn as_float(self) -> f64 {
242 f64::from_bits(self.0)
243 }
244
    /// Box a small int inline: 45-bit two's complement in payload bits 44-0.
    /// Caller must ensure `i` is within the inline range (debug-asserted).
    #[inline]
    pub fn new_int_inline(i: i64) -> Self {
        debug_assert!((INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i));
        let payload = (i as u64) & INT_INLINE_MASK;
        Self::encode(TAG_INT, payload)
    }

    /// Box an int stored in an arena slot; INT_BIG_BIT marks the indirection.
    #[inline]
    pub fn new_int_arena(arena_index: u32) -> Self {
        Self::encode(TAG_INT, INT_BIG_BIT | (arena_index as u64))
    }

    /// Box any i64, choosing inline or arena representation by range.
    #[inline]
    pub fn new_int(i: i64, arena: &mut Arena) -> Self {
        if (INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i) {
            Self::new_int_inline(i)
        } else {
            let idx = arena.push_i64(i);
            Self::new_int_arena(idx)
        }
    }

    /// Read an int back, following the arena indirection when present.
    /// Caller must ensure this value is actually a TAG_INT.
    #[inline]
    pub fn as_int(self, arena: &Arena) -> i64 {
        let p = self.payload();
        if p & INT_BIG_BIT != 0 {
            let idx = (p & !INT_BIG_BIT) as u32;
            arena.get_i64(idx)
        } else {
            Self::decode_inline_int_payload(p)
        }
    }

    /// Raw payload when this is an inline int; `None` for arena-backed ints
    /// and non-int values.
    #[inline]
    fn inline_int_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_INT && self.payload() & INT_BIG_BIT == 0)
            .then_some(self.payload())
    }

    /// Decoded value of an inline int, without touching the arena.
    #[inline]
    pub fn inline_int_value(self) -> Option<i64> {
        self.inline_int_payload()
            .map(Self::decode_inline_int_payload)
    }
289
    // Singleton constants: the three immediates, `None`, and the empty
    // collection values. The empty collections carry a zero payload
    // (ARENA_REF_BIT clear), so they reference no arena slot.
    pub const FALSE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_FALSE);
    pub const TRUE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_TRUE);
    pub const UNIT: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_UNIT);
    pub const NONE: NanValue = NanValue(QNAN | (TAG_NONE << TAG_SHIFT));
    pub const EMPTY_LIST: NanValue = NanValue(QNAN | (TAG_LIST << TAG_SHIFT));
    pub const EMPTY_MAP: NanValue = NanValue(QNAN | (TAG_MAP << TAG_SHIFT));
    pub const EMPTY_VECTOR: NanValue = NanValue(QNAN | (TAG_VECTOR << TAG_SHIFT));
    pub const EMPTY_STRING: NanValue = NanValue(QNAN | (TAG_STRING << TAG_SHIFT));
300
301 #[inline]
302 pub fn new_bool(b: bool) -> Self {
303 if b { Self::TRUE } else { Self::FALSE }
304 }
305
306 #[inline]
307 pub fn as_bool(self) -> bool {
308 self.0 == Self::TRUE.0
309 }
310
    /// Payload of a plain immediate (False/True/Unit); used to decide whether
    /// a wrapped value can be stored inline inside a Some/Ok/Err.
    #[inline]
    fn plain_immediate_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_IMMEDIATE && self.payload() <= IMM_UNIT)
            .then_some(self.payload())
    }

    /// Map a wrapper tag back to its WRAP_* kind code.
    ///
    /// Panics on non-wrapper values; callers check the tag first.
    #[inline]
    fn wrapper_kind(self) -> u64 {
        match self.tag() {
            TAG_SOME => WRAP_SOME,
            TAG_OK => WRAP_OK,
            TAG_ERR => WRAP_ERR,
            _ => panic!("wrapper_kind() called on non-wrapper"),
        }
    }

    /// For an inline (non-arena) wrapper, the WRAPPER_INLINE_* kind stored in
    /// payload bits 44-43; `None` for arena-backed wrappers and non-wrappers.
    #[inline]
    fn wrapper_inline_kind(self) -> Option<u64> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT == 0 => {
                Some((self.payload() & WRAPPER_INLINE_KIND_MASK) >> WRAPPER_INLINE_KIND_SHIFT)
            }
            _ => None,
        }
    }

    /// Sign-extend a 43-bit inline wrapper int (sign at bit 42) back to i64.
    #[inline]
    fn decode_wrapper_inline_int_payload(payload: u64) -> i64 {
        let raw = payload & WRAPPER_INT_INLINE_MASK;
        if raw & (1u64 << 42) != 0 {
            (raw | !WRAPPER_INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Truncate an in-range int to its 43-bit two's-complement inline form.
    #[inline]
    fn encode_wrapper_inline_int(i: i64) -> u64 {
        debug_assert!((WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&i));
        (i as u64) & WRAPPER_INT_INLINE_MASK
    }

    /// Reconstruct the inner value of an inline wrapper; `None` for
    /// arena-backed wrappers, non-wrappers, or an unknown inline kind.
    #[inline]
    fn wrapper_inline_inner(self) -> Option<NanValue> {
        let kind = self.wrapper_inline_kind()?;
        let payload = self.payload() & WRAPPER_INLINE_PAYLOAD_MASK;
        match kind {
            WRAPPER_INLINE_IMMEDIATE => Some(Self::encode(TAG_IMMEDIATE, payload)),
            WRAPPER_INLINE_INT => Some(Self::new_int_inline(
                Self::decode_wrapper_inline_int_payload(payload),
            )),
            WRAPPER_INLINE_NONE => Some(Self::NONE),
            _ => None,
        }
    }

    /// Assemble an inline wrapper: kind in bits 44-43, data in bits 42-0.
    #[inline]
    fn new_inline_wrapper(tag: u64, inline_kind: u64, payload: u64) -> Self {
        debug_assert!(matches!(tag, TAG_SOME | TAG_OK | TAG_ERR));
        debug_assert!(inline_kind <= WRAPPER_INLINE_NONE);
        debug_assert!(payload <= WRAPPER_INLINE_PAYLOAD_MASK);
        Self::encode(tag, (inline_kind << WRAPPER_INLINE_KIND_SHIFT) | payload)
    }

    /// (kind, inner) for any wrapper, resolving arena-backed inners through
    /// the arena; `None` for non-wrapper values.
    #[inline]
    fn wrapper_parts(self, arena: &Arena) -> Option<(u64, NanValue)> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT != 0 => {
                Some((self.wrapper_kind(), arena.get_boxed(self.arena_index())))
            }
            TAG_SOME | TAG_OK | TAG_ERR => self
                .wrapper_inline_inner()
                .map(|inner| (self.wrapper_kind(), inner)),
            _ => None,
        }
    }
393
394 #[inline]
397 pub fn new_some(inner_index: u32) -> Self {
398 Self::encode(TAG_SOME, ARENA_REF_BIT | (inner_index as u64))
399 }
400
401 #[inline]
402 pub fn new_ok(inner_index: u32) -> Self {
403 Self::encode(TAG_OK, ARENA_REF_BIT | (inner_index as u64))
404 }
405
406 #[inline]
407 pub fn new_err(inner_index: u32) -> Self {
408 Self::encode(TAG_ERR, ARENA_REF_BIT | (inner_index as u64))
409 }
410
    /// Arena slot of a heap-backed wrapper's inner value. Only valid on
    /// arena-backed Some/Ok/Err values (debug-asserted).
    #[inline]
    pub fn wrapper_index(self) -> u32 {
        debug_assert!(
            self.is_nan_boxed()
                && matches!(self.tag(), TAG_SOME | TAG_OK | TAG_ERR)
                && self.payload() & ARENA_REF_BIT != 0
        );
        self.arena_index()
    }

    /// Inner value of any wrapper, inline or arena-backed.
    ///
    /// Panics (via `expect`) when called on a non-wrapper value.
    #[inline]
    pub fn wrapper_inner(self, arena: &Arena) -> NanValue {
        self.wrapper_parts(arena)
            .map(|(_, inner)| inner)
            .expect("wrapper_inner() called on non-wrapper")
    }
427
428 #[inline]
429 fn wrap_value(kind: u64, inner: NanValue, arena: &mut Arena) -> Self {
430 if let Some(payload) = inner.plain_immediate_payload() {
431 let tag = match kind {
432 WRAP_SOME => TAG_SOME,
433 WRAP_OK => TAG_OK,
434 WRAP_ERR => TAG_ERR,
435 _ => unreachable!("invalid wrapper kind"),
436 };
437 Self::new_inline_wrapper(tag, WRAPPER_INLINE_IMMEDIATE, payload)
438 } else if inner.is_none() {
439 let tag = match kind {
440 WRAP_SOME => TAG_SOME,
441 WRAP_OK => TAG_OK,
442 WRAP_ERR => TAG_ERR,
443 _ => unreachable!("invalid wrapper kind"),
444 };
445 Self::new_inline_wrapper(tag, WRAPPER_INLINE_NONE, 0)
446 } else if let Some(value) = inner.inline_int_value() {
447 if (WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&value) {
448 let tag = match kind {
449 WRAP_SOME => TAG_SOME,
450 WRAP_OK => TAG_OK,
451 WRAP_ERR => TAG_ERR,
452 _ => unreachable!("invalid wrapper kind"),
453 };
454 return Self::new_inline_wrapper(
455 tag,
456 WRAPPER_INLINE_INT,
457 Self::encode_wrapper_inline_int(value),
458 );
459 }
460 let idx = arena.push_boxed(inner);
461 match kind {
462 WRAP_SOME => Self::new_some(idx),
463 WRAP_OK => Self::new_ok(idx),
464 WRAP_ERR => Self::new_err(idx),
465 _ => unreachable!("invalid wrapper kind"),
466 }
467 } else {
468 let idx = arena.push_boxed(inner);
469 match kind {
470 WRAP_SOME => Self::new_some(idx),
471 WRAP_OK => Self::new_ok(idx),
472 WRAP_ERR => Self::new_err(idx),
473 _ => unreachable!("invalid wrapper kind"),
474 }
475 }
476 }
477
478 #[inline]
479 pub fn new_some_value(inner: NanValue, arena: &mut Arena) -> Self {
480 Self::wrap_value(WRAP_SOME, inner, arena)
481 }
482
483 #[inline]
484 pub fn new_ok_value(inner: NanValue, arena: &mut Arena) -> Self {
485 Self::wrap_value(WRAP_OK, inner, arena)
486 }
487
488 #[inline]
489 pub fn new_err_value(inner: NanValue, arena: &mut Arena) -> Self {
490 Self::wrap_value(WRAP_ERR, inner, arena)
491 }
492
    /// String stored in an arena slot; STRING_ARENA_BIT marks the
    /// indirection.
    #[inline]
    pub fn new_string(arena_index: u32) -> Self {
        Self::encode(TAG_STRING, STRING_ARENA_BIT | (arena_index as u64))
    }

    /// Pack up to 5 UTF-8 bytes directly into the payload: byte `i` lands in
    /// payload bits 8i..8i+8, the length in bits 42-40. `bytes` must be a
    /// whole UTF-8 string (callers pass `str::as_bytes`).
    #[inline]
    fn new_small_string_bytes(bytes: &[u8]) -> Self {
        debug_assert!(bytes.len() <= STRING_INLINE_MAX_BYTES);
        let mut payload = (bytes.len() as u64) << STRING_INLINE_LEN_SHIFT;
        for (idx, byte) in bytes.iter().enumerate() {
            payload |= (*byte as u64) << (idx * 8);
        }
        Self::encode(TAG_STRING, payload)
    }

    /// Unpack an inline string; `None` for arena-backed strings, non-string
    /// values, or a corrupt length field (> STRING_INLINE_MAX_BYTES — the
    /// 3-bit field could encode 6 or 7).
    #[inline]
    pub(crate) fn small_string(self) -> Option<NanString<'static>> {
        if !self.is_nan_boxed()
            || self.tag() != TAG_STRING
            || self.payload() & STRING_ARENA_BIT != 0
        {
            return None;
        }
        let payload = self.payload();
        let len = ((payload & STRING_INLINE_LEN_MASK) >> STRING_INLINE_LEN_SHIFT) as u8;
        if len as usize > STRING_INLINE_MAX_BYTES {
            return None;
        }
        let mut bytes = [0u8; STRING_INLINE_MAX_BYTES];
        for (idx, slot) in bytes.iter_mut().take(len as usize).enumerate() {
            *slot = ((payload >> (idx * 8)) & 0xFF) as u8;
        }
        Some(NanString::Inline { len, bytes })
    }
529
530 #[inline]
531 pub fn new_string_value(s: &str, arena: &mut Arena) -> Self {
532 if s.len() <= STRING_INLINE_MAX_BYTES {
533 Self::new_small_string_bytes(s.as_bytes())
534 } else {
535 Self::new_string(arena.push_string(s))
536 }
537 }
538
539 #[inline]
540 pub fn new_list(arena_index: u32) -> Self {
541 Self::encode(TAG_LIST, ARENA_REF_BIT | (arena_index as u64))
542 }
543
544 #[inline]
545 pub fn new_tuple(arena_index: u32) -> Self {
546 Self::encode(TAG_TUPLE, ARENA_REF_BIT | (arena_index as u64))
547 }
548
549 #[inline]
550 pub fn new_map(arena_index: u32) -> Self {
551 Self::encode(TAG_MAP, ARENA_REF_BIT | (arena_index as u64))
552 }
553
554 #[inline]
555 pub fn new_vector(arena_index: u32) -> Self {
556 Self::encode(TAG_VECTOR, ARENA_REF_BIT | (arena_index as u64))
557 }
558
559 #[inline]
560 pub fn new_record(arena_index: u32) -> Self {
561 Self::encode(TAG_RECORD, ARENA_REF_BIT | (arena_index as u64))
562 }
563
564 #[inline]
565 pub fn new_variant(arena_index: u32) -> Self {
566 Self::encode(TAG_VARIANT, ARENA_REF_BIT | (arena_index as u64))
567 }
568
    /// Symbols pack a 2-bit kind into payload bits 1-0 and the symbol index
    /// into the bits above (index << 2).
    #[inline]
    fn new_symbol(symbol_kind: u64, symbol_index: u32) -> Self {
        Self::encode(TAG_SYMBOL, symbol_kind | ((symbol_index as u64) << 2))
    }

    /// SYMBOL_* kind bits of a symbol value (debug-asserted to be a symbol).
    #[inline]
    pub(crate) fn symbol_kind(self) -> u64 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        self.payload() & SYMBOL_KIND_MASK
    }

    /// Index portion of a symbol payload (debug-asserted to be a symbol).
    #[inline]
    pub(crate) fn symbol_index(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        (self.payload() >> 2) as u32
    }

    /// A variant with no fields, represented as a symbol (no arena slot).
    #[inline]
    pub fn new_nullary_variant(symbol_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NULLARY_VARIANT, symbol_index)
    }

    /// A user function, identified by its symbol-table index.
    #[inline]
    pub fn new_fn(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_FN, arena_index)
    }

    /// A builtin function, identified by its symbol-table index.
    #[inline]
    pub fn new_builtin(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_BUILTIN, arena_index)
    }

    /// A namespace, identified by its symbol-table index.
    #[inline]
    pub fn new_namespace(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NAMESPACE, arena_index)
    }

    /// Arena slot of an arena-backed value: the payload with ARENA_REF_BIT
    /// masked off, truncated to u32 (indices are pushed as u32, so bits
    /// 32-44 are always zero here).
    #[inline]
    pub fn arena_index(self) -> u32 {
        (self.payload() & !ARENA_REF_BIT) as u32
    }
610
    /// Arena slot this value references, if any.
    ///
    /// Floats, immediates, symbols, inline ints, inline strings, inline
    /// wrappers, and the empty-collection constants reference no slot and
    /// yield `None`.
    #[inline]
    pub fn heap_index(self) -> Option<u32> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_INT => {
                let p = self.payload();
                if p & INT_BIG_BIT != 0 {
                    Some((p & !INT_BIG_BIT) as u32)
                } else {
                    None
                }
            }
            TAG_STRING | TAG_SOME | TAG_OK | TAG_ERR | TAG_LIST | TAG_TUPLE | TAG_MAP
            | TAG_RECORD | TAG_VARIANT | TAG_VECTOR => {
                (self.payload() & ARENA_REF_BIT != 0).then_some(self.arena_index())
            }
            _ => None,
        }
    }

    /// Copy of this value pointing at `index` instead of its current arena
    /// slot, with the tag preserved. Values without a slot pass through
    /// unchanged; inline ints are debug-asserted against.
    ///
    /// NOTE(review): non-int inline forms (inline strings/wrappers, empty
    /// collections) are not guarded here and would be rewritten into
    /// arena-backed form — presumably callers only pass values for which
    /// `heap_index()` is `Some`; confirm against the memory module.
    #[inline]
    pub fn with_heap_index(self, index: u32) -> Self {
        if !self.is_nan_boxed() {
            return self;
        }
        match self.tag() {
            TAG_INT => {
                debug_assert!(self.payload() & INT_BIG_BIT != 0);
                Self::new_int_arena(index)
            }
            TAG_SOME => Self::new_some(index),
            TAG_OK => Self::new_ok(index),
            TAG_ERR => Self::new_err(index),
            TAG_STRING => Self::new_string(index),
            TAG_LIST => Self::new_list(index),
            TAG_TUPLE => Self::new_tuple(index),
            TAG_MAP => Self::new_map(index),
            TAG_VECTOR => Self::new_vector(index),
            TAG_RECORD => Self::new_record(index),
            TAG_VARIANT => Self::new_variant(index),
            _ => self,
        }
    }
656
657 #[inline]
660 pub fn is_float(self) -> bool {
661 !self.is_nan_boxed()
662 }
663
664 #[inline]
665 pub fn is_int(self) -> bool {
666 self.is_nan_boxed() && self.tag() == TAG_INT
667 }
668
669 #[inline]
670 pub fn is_bool(self) -> bool {
671 self.is_nan_boxed()
672 && self.tag() == TAG_IMMEDIATE
673 && (self.payload() == IMM_TRUE || self.payload() == IMM_FALSE)
674 }
675
676 #[inline]
677 pub fn is_unit(self) -> bool {
678 self.0 == Self::UNIT.0
679 }
680
681 #[inline]
682 pub fn is_none(self) -> bool {
683 self.0 == Self::NONE.0
684 }
685
686 #[inline]
687 pub fn is_some(self) -> bool {
688 self.is_nan_boxed() && self.tag() == TAG_SOME
689 }
690
691 #[inline]
692 pub fn is_ok(self) -> bool {
693 self.is_nan_boxed() && self.tag() == TAG_OK
694 }
695
696 #[inline]
697 pub fn is_err(self) -> bool {
698 self.is_nan_boxed() && self.tag() == TAG_ERR
699 }
700
701 #[inline]
702 pub fn is_string(self) -> bool {
703 self.is_nan_boxed() && self.tag() == TAG_STRING
704 }
705
706 pub fn string_eq(self, other: NanValue, arena: &Arena) -> bool {
709 if self.bits() == other.bits() {
710 return true; }
712 if !self.is_string() || !other.is_string() {
713 return false;
714 }
715 arena.get_string_value(self).as_str() == arena.get_string_value(other).as_str()
716 }
717
718 #[inline]
719 pub fn is_list(self) -> bool {
720 self.is_nan_boxed() && self.tag() == TAG_LIST
721 }
722
723 #[inline]
724 pub fn is_record(self) -> bool {
725 self.is_nan_boxed() && self.tag() == TAG_RECORD
726 }
727
728 #[inline]
729 pub fn is_fn(self) -> bool {
730 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_FN
731 }
732
733 #[inline]
734 pub fn is_variant(self) -> bool {
735 self.is_nan_boxed()
736 && (self.tag() == TAG_VARIANT
737 || (self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NULLARY_VARIANT))
738 }
739
740 #[inline]
741 pub fn is_map(self) -> bool {
742 self.is_nan_boxed() && self.tag() == TAG_MAP
743 }
744
745 #[inline]
746 pub fn is_vector(self) -> bool {
747 self.is_nan_boxed() && self.tag() == TAG_VECTOR
748 }
749
750 #[inline]
751 pub fn is_empty_vector_immediate(self) -> bool {
752 self.is_nan_boxed() && self.tag() == TAG_VECTOR && self.payload() & ARENA_REF_BIT == 0
753 }
754
755 #[inline]
756 pub fn is_tuple(self) -> bool {
757 self.is_nan_boxed() && self.tag() == TAG_TUPLE
758 }
759
760 #[inline]
761 pub fn is_builtin(self) -> bool {
762 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_BUILTIN
763 }
764
765 #[inline]
766 pub fn is_namespace(self) -> bool {
767 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NAMESPACE
768 }
769
770 #[inline]
771 pub fn is_empty_list_immediate(self) -> bool {
772 self.is_nan_boxed() && self.tag() == TAG_LIST && self.payload() & ARENA_REF_BIT == 0
773 }
774
775 #[inline]
776 pub fn is_empty_map_immediate(self) -> bool {
777 self.is_nan_boxed() && self.tag() == TAG_MAP && self.payload() & ARENA_REF_BIT == 0
778 }
779
    /// Human-readable name of this value's runtime type, for diagnostics.
    /// Needs no arena access; everything is derived from the tag bits.
    pub fn type_name(self) -> &'static str {
        if self.is_float() {
            return "Float";
        }
        match self.tag() {
            TAG_INT => "Int",
            TAG_IMMEDIATE => match self.payload() {
                IMM_FALSE | IMM_TRUE => "Bool",
                IMM_UNIT => "Unit",
                _ => "Unknown",
            },
            TAG_SOME => "Option.Some",
            TAG_NONE => "Option.None",
            TAG_OK => "Result.Ok",
            TAG_ERR => "Result.Err",
            TAG_STRING => "String",
            TAG_LIST => "List",
            TAG_TUPLE => "Tuple",
            TAG_MAP => "Map",
            TAG_VECTOR => "Vector",
            TAG_RECORD => "Record",
            TAG_VARIANT => "Variant",
            TAG_SYMBOL => match self.symbol_kind() {
                SYMBOL_FN => "Fn",
                SYMBOL_BUILTIN => "Builtin",
                SYMBOL_NAMESPACE => "Namespace",
                SYMBOL_NULLARY_VARIANT => "Variant",
                _ => "Unknown",
            },
            _ => "Unknown",
        }
    }
812
    /// Constructor id for a variant value, handling both heap-backed
    /// variants and nullary symbol variants; `None` for non-variants.
    #[inline]
    pub fn variant_ctor_id(self, arena: &Arena) -> Option<u32> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_VARIANT => {
                let (type_id, variant_id, _) = arena.get_variant(self.arena_index());
                arena.find_ctor_id(type_id, variant_id)
            }
            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
                Some(arena.get_nullary_variant_ctor(self.symbol_index()))
            }
            _ => None,
        }
    }

    /// (type id, variant id, fields) for a variant value; nullary symbol
    /// variants report an empty field slice. `None` for non-variants.
    #[inline]
    pub fn variant_parts(self, arena: &Arena) -> Option<(u32, u16, &[NanValue])> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_VARIANT => {
                let (type_id, variant_id, fields) = arena.get_variant(self.arena_index());
                Some((type_id, variant_id, fields))
            }
            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
                let (type_id, variant_id) =
                    arena.get_ctor_parts(arena.get_nullary_variant_ctor(self.symbol_index()));
                Some((type_id, variant_id, &[]))
            }
            _ => None,
        }
    }
848
849 #[inline]
851 pub fn bits(self) -> u64 {
852 self.0
853 }
854
855 #[inline]
856 pub fn from_bits(bits: u64) -> Self {
857 NanValue(bits)
858 }
859
860 pub fn map_key_hash(self, arena: &Arena) -> u64 {
865 if self.is_string() {
866 use std::hash::{Hash, Hasher};
867 let mut hasher = std::collections::hash_map::DefaultHasher::new();
868 3u8.hash(&mut hasher);
869 arena.get_string_value(self).hash(&mut hasher);
870 hasher.finish()
871 } else {
872 self.bits()
873 }
874 }
875}
876
impl std::fmt::Debug for NanValue {
    /// Arena-free debug rendering: values whose payload lives in the arena
    /// are shown as `Kind(arena:IDX)` rather than dereferenced, since no
    /// arena is available here.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.is_float() {
            return write!(f, "Float({})", self.as_float());
        }
        match self.tag() {
            TAG_INT => {
                if self.payload() & INT_BIG_BIT != 0 {
                    write!(f, "Int(arena:{})", (self.payload() & !INT_BIG_BIT) as u32)
                } else {
                    write!(
                        f,
                        "Int({})",
                        Self::decode_inline_int_payload(self.payload())
                    )
                }
            }
            TAG_IMMEDIATE => match self.payload() {
                IMM_FALSE => write!(f, "False"),
                IMM_TRUE => write!(f, "True"),
                IMM_UNIT => write!(f, "Unit"),
                _ => write!(f, "Immediate({})", self.payload()),
            },
            TAG_NONE => write!(f, "None"),
            TAG_SOME | TAG_OK | TAG_ERR => {
                let kind = match self.tag() {
                    TAG_SOME => "Some",
                    TAG_OK => "Ok",
                    TAG_ERR => "Err",
                    _ => "?",
                };
                if self.payload() & ARENA_REF_BIT != 0 {
                    write!(f, "{}(arena:{})", kind, self.arena_index())
                } else if let Some(inner) = self.wrapper_inline_inner() {
                    // Inline wrappers can be fully decoded without an arena.
                    write!(f, "{}({:?})", kind, inner)
                } else {
                    write!(f, "{}(?)", kind)
                }
            }
            TAG_SYMBOL => match self.symbol_kind() {
                SYMBOL_FN => write!(f, "Fn(symbol:{})", self.symbol_index()),
                SYMBOL_BUILTIN => write!(f, "Builtin(symbol:{})", self.symbol_index()),
                SYMBOL_NAMESPACE => write!(f, "Namespace(symbol:{})", self.symbol_index()),
                SYMBOL_NULLARY_VARIANT => {
                    write!(f, "NullaryVariant(symbol:{})", self.symbol_index())
                }
                _ => write!(f, "Symbol({})", self.payload()),
            },
            TAG_STRING => {
                if let Some(s) = self.small_string() {
                    write!(f, "String({:?})", s.as_str())
                } else {
                    write!(f, "String(arena:{})", self.arena_index())
                }
            }
            TAG_LIST if self.is_empty_list_immediate() => write!(f, "EmptyList"),
            TAG_MAP if self.is_empty_map_immediate() => write!(f, "EmptyMap"),
            TAG_VECTOR if self.is_empty_vector_immediate() => write!(f, "EmptyVector"),
            _ => write!(f, "{}(arena:{})", self.type_name(), self.arena_index()),
        }
    }
}
941
/// Backing store for all heap-allocated `NanValue` payloads, plus the type
/// and symbol metadata needed to interpret them. Entries are segregated into
/// four spaces (young / yard / handoff / stable); the policies governing
/// those spaces live in the `memory` submodule, not here.
#[derive(Debug, Clone)]
pub struct Arena {
    // One entry vector per heap space (see `HeapSpace`).
    young_entries: Vec<ArenaEntry>,
    yard_entries: Vec<ArenaEntry>,
    handoff_entries: Vec<ArenaEntry>,
    stable_entries: Vec<ArenaEntry>,
    // Per-space scratch index buffers. NOTE(review): consumed by the
    // `memory` submodule (not visible here) — presumably relocation
    // bookkeeping; confirm there.
    scratch_young: Vec<u32>,
    scratch_yard: Vec<u32>,
    scratch_handoff: Vec<u32>,
    scratch_stable: Vec<u32>,
    // High-water mark of per-space entry counts.
    peak_usage: ArenaUsage,
    // Space that newly pushed entries go into.
    alloc_space: AllocSpace,
    // User-type metadata, indexed by type id.
    pub(crate) type_names: Vec<String>,
    pub(crate) type_field_names: Vec<Vec<String>>,
    pub(crate) type_variant_names: Vec<Vec<String>>,
    pub(crate) type_variant_ctor_ids: Vec<Vec<u32>>,
    // Constructor id -> (type id, variant id).
    pub(crate) ctor_to_type_variant: Vec<(u32, u16)>,
    // Interned symbol payloads referenced by TAG_SYMBOL values.
    pub(crate) symbol_entries: Vec<ArenaSymbol>,
}
965
/// A single heap cell. A `NanValue`'s tag determines which variant its
/// arena index is expected to resolve to; `Boxed` holds the inner value of
/// an arena-backed Some/Ok/Err wrapper.
#[derive(Debug, Clone)]
pub enum ArenaEntry {
    /// An integer outside the 45-bit inline range.
    Int(i64),
    String(Rc<str>),
    List(ArenaList),
    Tuple(Vec<NanValue>),
    Map(PersistentMap),
    Vector(Vec<NanValue>),
    Record {
        type_id: u32,
        fields: Vec<NanValue>,
    },
    Variant {
        type_id: u32,
        variant_id: u16,
        fields: Vec<NanValue>,
    },
    Fn(Rc<FunctionValue>),
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Inner value of an arena-backed Some/Ok/Err wrapper.
    Boxed(NanValue),
}

/// Payloads for TAG_SYMBOL values, stored in `Arena::symbol_entries` and
/// addressed by `NanValue::symbol_index`.
#[derive(Debug, Clone)]
pub enum ArenaSymbol {
    Fn(Rc<FunctionValue>),
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// A field-less variant, identified only by its constructor id.
    NullaryVariant {
        ctor_id: u32,
    },
}
1004
/// Structural representations of a list. Multiple variants exist so common
/// operations can avoid copying; traversal/normalization logic lives in the
/// `lists` submodule.
#[derive(Debug, Clone)]
pub enum ArenaList {
    /// Contiguous elements shared via `Rc`. NOTE(review): `start` appears to
    /// offset the live view into `items` — confirm in the `lists` module.
    Flat {
        items: Rc<Vec<NanValue>>,
        start: usize,
    },
    /// Cons cell: `head` followed by the list value `tail`; `len` caches the
    /// total length.
    Prepend {
        head: NanValue,
        tail: NanValue,
        len: usize,
    },
    /// Deferred concatenation of two list values; `len` caches the total.
    Concat {
        left: NanValue,
        right: NanValue,
        len: usize,
    },
    /// A current segment plus further segments in `rest`. NOTE(review):
    /// semantics of `start` relative to `rest` are defined by the `lists`
    /// module — confirm there.
    Segments {
        current: NanValue,
        rest: Rc<Vec<NanValue>>,
        start: usize,
        len: usize,
    },
}

/// Size cap used by list-append chunking. NOTE(review): consumed by the
/// `lists` submodule — exact role not visible here.
const LIST_APPEND_CHUNK_LIMIT: usize = 128;
1030
/// Which of the four entry vectors a heap handle points into. The explicit
/// discriminants presumably match the 2-bit space field packed into u32 heap
/// handles (see HEAP_SPACE_SHIFT below) — managed by the `memory` submodule.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum HeapSpace {
    Young = 0,
    Yard = 1,
    Handoff = 2,
    Stable = 3,
}

/// Spaces that can receive new allocations (`Stable` is excluded).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocSpace {
    Young,
    Yard,
    Handoff,
}

/// Per-space entry counts; used for peak-usage tracking.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct ArenaUsage {
    pub young: usize,
    pub yard: usize,
    pub handoff: usize,
    pub stable: usize,
}
1053
1054impl ArenaUsage {
1055 pub fn total(self) -> usize {
1056 self.young + self.yard + self.handoff + self.stable
1057 }
1058}
1059
// Heap handles pack a 2-bit space id (bits 31-30) and a 30-bit entry index
// into a single u32.
const HEAP_SPACE_SHIFT: u32 = 30;
const HEAP_SPACE_MASK_U32: u32 = 0b11 << HEAP_SPACE_SHIFT;
const HEAP_INDEX_MASK_U32: u32 = (1 << HEAP_SPACE_SHIFT) - 1;
1063
1064mod arena;
1065mod compare;
1066mod convert;
1067mod lists;
1068mod memory;
1069
1070#[cfg(test)]
1071#[allow(clippy::approx_constant)]
1072mod tests;