use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::rc::Rc;

use crate::value::FunctionValue;
/// Backing store for Map values. Keys are `u64` hashes (presumably
/// produced by `NanValue::map_key_hash` — see that method), each mapping
/// to the original `(key, value)` pair.
pub type PersistentMap = im::HashMap<u64, (NanValue, NanValue)>;

// === NaN-boxing layout ===================================================
//
// A `NanValue` is a single u64. Any word satisfying
// `(bits & QNAN_MASK) == QNAN` carries a boxed (non-float) value; every
// other bit pattern is a plain f64. Boxed words are laid out as:
//
//   bit  63     : 0 (sign bit is part of QNAN_MASK and must be clear)
//   bits 62..50 : reserved quiet-NaN pattern (QNAN)
//   bits 49..46 : 4-bit type tag (TAG_*)
//   bits 45..0  : 46-bit tag-specific payload

const QNAN: u64 = 0x7FFC_0000_0000_0000;
const QNAN_MASK: u64 = 0xFFFC_0000_0000_0000;
const TAG_SHIFT: u32 = 46;
const TAG_MASK: u64 = 0xF;
const PAYLOAD_MASK: u64 = (1u64 << 46) - 1;

// Type tags (bits 49..46 of a boxed word).
const TAG_IMMEDIATE: u64 = 0;
const TAG_SYMBOL: u64 = 1;
const TAG_INT: u64 = 2;
const TAG_STRING: u64 = 3;
const TAG_SOME: u64 = 4;
const TAG_NONE: u64 = 5;
const TAG_OK: u64 = 6;
const TAG_ERR: u64 = 7;
const TAG_LIST: u64 = 8;
const TAG_TUPLE: u64 = 9;
const TAG_MAP: u64 = 10;
const TAG_RECORD: u64 = 11;
const TAG_VARIANT: u64 = 12;

// TAG_SYMBOL payload: symbol kind in the low 2 bits, symbol index above.
const SYMBOL_FN: u64 = 0;
const SYMBOL_BUILTIN: u64 = 1;
const SYMBOL_NAMESPACE: u64 = 2;
const SYMBOL_NULLARY_VARIANT: u64 = 3;
const SYMBOL_KIND_MASK: u64 = 0b11;

// TAG_IMMEDIATE payloads.
const IMM_FALSE: u64 = 0;
const IMM_TRUE: u64 = 1;
const IMM_UNIT: u64 = 2;

// Wrapper (Some/Ok/Err) encoding. When ARENA_REF_BIT (bit 45) is clear,
// the inner value is packed inline: bits 44..43 select the inline kind,
// bits 42..0 carry the inline payload. Inline wrapper ints are 43-bit
// two's complement (range ±2^42).
const WRAP_SOME: u64 = 0;
const WRAP_OK: u64 = 1;
const WRAP_ERR: u64 = 2;
const WRAPPER_INLINE_KIND_SHIFT: u32 = 43;
const WRAPPER_INLINE_KIND_MASK: u64 = 0b11 << WRAPPER_INLINE_KIND_SHIFT;
const WRAPPER_INLINE_PAYLOAD_MASK: u64 = (1u64 << WRAPPER_INLINE_KIND_SHIFT) - 1;
const WRAPPER_INLINE_IMMEDIATE: u64 = 0;
const WRAPPER_INLINE_INT: u64 = 1;
const WRAPPER_INLINE_NONE: u64 = 2;
const WRAPPER_INT_INLINE_MASK: u64 = WRAPPER_INLINE_PAYLOAD_MASK;
const WRAPPER_INT_INLINE_MAX: i64 = (1i64 << 42) - 1;
const WRAPPER_INT_INLINE_MIN: i64 = -(1i64 << 42);

// Payload bit 45 marks "payload is an arena index" for heap-backed tags.
const ARENA_REF_BIT: u64 = 1u64 << 45;
// TAG_INT: 45-bit two's-complement inline int (range ±2^44), or an arena
// index when INT_BIG_BIT is set.
const INT_BIG_BIT: u64 = ARENA_REF_BIT;
const INT_INLINE_MASK: u64 = (1u64 << 45) - 1;
const INT_INLINE_MAX: i64 = (1i64 << 44) - 1;
const INT_INLINE_MIN: i64 = -(1i64 << 44);

// TAG_STRING: arena index when STRING_ARENA_BIT is set; otherwise an
// inline string of up to 5 UTF-8 bytes — length in bits 42..40, bytes
// packed little-endian in bits 39..0.
const STRING_ARENA_BIT: u64 = ARENA_REF_BIT;
const STRING_INLINE_LEN_SHIFT: u32 = 40;
const STRING_INLINE_LEN_MASK: u64 = 0b111 << STRING_INLINE_LEN_SHIFT;
const STRING_INLINE_MAX_BYTES: usize = 5;
102
/// A 64-bit NaN-boxed runtime value: either a plain `f64`, or a tagged
/// payload hidden inside the reserved quiet-NaN bit range (layout
/// documented on the `QNAN`/`TAG_*` constants in this file).
#[derive(Clone, Copy)]
pub struct NanValue(u64);
109
/// A string read out of a `NanValue`: either borrowed (e.g. from the
/// arena) or reconstructed from the up-to-5 bytes packed directly in the
/// NaN-boxed payload.
#[derive(Clone, Copy, Debug)]
pub enum NanString<'a> {
    Borrowed(&'a str),
    Inline {
        // Number of valid bytes in `bytes` (0..=STRING_INLINE_MAX_BYTES).
        len: u8,
        bytes: [u8; STRING_INLINE_MAX_BYTES],
    },
}
118
119impl<'a> NanString<'a> {
120 #[inline]
121 pub fn as_str(&self) -> &str {
122 match self {
123 NanString::Borrowed(s) => s,
124 NanString::Inline { len, bytes } => std::str::from_utf8(&bytes[..*len as usize])
125 .expect("NanString inline payload must be valid UTF-8"),
126 }
127 }
128}
129
130impl Deref for NanString<'_> {
131 type Target = str;
132
133 #[inline]
134 fn deref(&self) -> &Self::Target {
135 self.as_str()
136 }
137}
138
139impl PartialEq for NanString<'_> {
140 #[inline]
141 fn eq(&self, other: &Self) -> bool {
142 self.as_str() == other.as_str()
143 }
144}
145
146impl Eq for NanString<'_> {}
147
148impl PartialEq<&str> for NanString<'_> {
149 #[inline]
150 fn eq(&self, other: &&str) -> bool {
151 self.as_str() == *other
152 }
153}
154
155impl PartialEq<NanString<'_>> for &str {
156 #[inline]
157 fn eq(&self, other: &NanString<'_>) -> bool {
158 *self == other.as_str()
159 }
160}
161
162impl PartialOrd for NanString<'_> {
163 #[inline]
164 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
165 Some(self.cmp(other))
166 }
167}
168
169impl Ord for NanString<'_> {
170 #[inline]
171 fn cmp(&self, other: &Self) -> Ordering {
172 self.as_str().cmp(other.as_str())
173 }
174}
175
176impl Hash for NanString<'_> {
177 #[inline]
178 fn hash<H: Hasher>(&self, state: &mut H) {
179 self.as_str().hash(state);
180 }
181}
182
183impl std::fmt::Display for NanString<'_> {
184 #[inline]
185 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
186 f.write_str(self.as_str())
187 }
188}
189
impl NanValue {
    /// Sign-extend the low 45 bits of an inline-int payload into an i64.
    /// Bit 44 is the sign bit. Must not be called with the arena flag set.
    #[inline]
    fn decode_inline_int_payload(payload: u64) -> i64 {
        debug_assert!(payload & INT_BIG_BIT == 0);
        let raw = payload & INT_INLINE_MASK;
        if raw & (1u64 << 44) != 0 {
            // Negative: fill the high bits with ones.
            (raw | !INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Assemble a boxed word from a 4-bit tag and a 46-bit payload.
    #[inline]
    fn encode(tag: u64, payload: u64) -> Self {
        debug_assert!(tag <= TAG_MASK);
        debug_assert!(payload <= PAYLOAD_MASK);
        NanValue(QNAN | (tag << TAG_SHIFT) | payload)
    }

    /// True when the word matches the reserved quiet-NaN pattern,
    /// i.e. it carries a boxed value rather than a plain f64.
    #[inline]
    fn is_nan_boxed(self) -> bool {
        (self.0 & QNAN_MASK) == QNAN
    }

    /// The 4-bit type tag. Only meaningful when `is_nan_boxed()`.
    #[inline]
    fn tag(self) -> u64 {
        (self.0 >> TAG_SHIFT) & TAG_MASK
    }

    /// The 46-bit payload. Only meaningful when `is_nan_boxed()`.
    #[inline]
    fn payload(self) -> u64 {
        self.0 & PAYLOAD_MASK
    }
225
226 #[inline]
229 pub fn new_float(f: f64) -> Self {
230 let bits = f.to_bits();
231 if (bits & QNAN_MASK) == QNAN {
232 NanValue(bits ^ 1)
233 } else {
234 NanValue(bits)
235 }
236 }
237
238 #[inline]
239 pub fn as_float(self) -> f64 {
240 f64::from_bits(self.0)
241 }
242
    /// Box a small integer inline. `i` must fit in 45 bits
    /// (`INT_INLINE_MIN..=INT_INLINE_MAX`); checked in debug builds only.
    #[inline]
    pub fn new_int_inline(i: i64) -> Self {
        debug_assert!((INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i));
        let payload = (i as u64) & INT_INLINE_MASK;
        Self::encode(TAG_INT, payload)
    }

    /// Box an integer stored in the arena at `arena_index`
    /// (`INT_BIG_BIT` marks the payload as an index).
    #[inline]
    pub fn new_int_arena(arena_index: u32) -> Self {
        Self::encode(TAG_INT, INT_BIG_BIT | (arena_index as u64))
    }

    /// Box an arbitrary i64: inline when it fits, otherwise spilled
    /// into the arena.
    #[inline]
    pub fn new_int(i: i64, arena: &mut Arena) -> Self {
        if (INT_INLINE_MIN..=INT_INLINE_MAX).contains(&i) {
            Self::new_int_inline(i)
        } else {
            let idx = arena.push_i64(i);
            Self::new_int_arena(idx)
        }
    }

    /// Read an int back out, following the arena indirection when present.
    /// Caller is expected to have checked `is_int()`.
    #[inline]
    pub fn as_int(self, arena: &Arena) -> i64 {
        let p = self.payload();
        if p & INT_BIG_BIT != 0 {
            let idx = (p & !INT_BIG_BIT) as u32;
            arena.get_i64(idx)
        } else {
            Self::decode_inline_int_payload(p)
        }
    }

    /// Raw payload of an inline int; `None` for floats, other tags, and
    /// arena-backed ints.
    #[inline]
    fn inline_int_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_INT && self.payload() & INT_BIG_BIT == 0)
            .then_some(self.payload())
    }

    /// Decoded value of an inline int, or `None` (see `inline_int_payload`).
    #[inline]
    pub fn inline_int_value(self) -> Option<i64> {
        self.inline_int_payload()
            .map(Self::decode_inline_int_payload)
    }
287
    // Pre-built singleton bit patterns. The empty containers use payload 0
    // with the container tag (ARENA_REF_BIT clear = no arena entry needed;
    // EMPTY_STRING is an inline string of length 0).
    pub const FALSE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_FALSE);
    pub const TRUE: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_TRUE);
    pub const UNIT: NanValue = NanValue(QNAN | (TAG_IMMEDIATE << TAG_SHIFT) | IMM_UNIT);
    pub const NONE: NanValue = NanValue(QNAN | (TAG_NONE << TAG_SHIFT));
    pub const EMPTY_LIST: NanValue = NanValue(QNAN | (TAG_LIST << TAG_SHIFT));
    pub const EMPTY_MAP: NanValue = NanValue(QNAN | (TAG_MAP << TAG_SHIFT));
    pub const EMPTY_STRING: NanValue = NanValue(QNAN | (TAG_STRING << TAG_SHIFT));

    #[inline]
    pub fn new_bool(b: bool) -> Self {
        if b { Self::TRUE } else { Self::FALSE }
    }

    /// True only for the exact `TRUE` bit pattern; every other value
    /// (including non-bools) reads as false.
    #[inline]
    pub fn as_bool(self) -> bool {
        self.0 == Self::TRUE.0
    }

    /// Payload of a plain immediate (False/True/Unit), `None` otherwise.
    /// Used to decide whether a wrapper can inline its inner value.
    #[inline]
    fn plain_immediate_payload(self) -> Option<u64> {
        (self.is_nan_boxed() && self.tag() == TAG_IMMEDIATE && self.payload() <= IMM_UNIT)
            .then_some(self.payload())
    }
313
    /// Map a wrapper tag to its `WRAP_*` kind code.
    /// Panics if called on a non-wrapper tag.
    #[inline]
    fn wrapper_kind(self) -> u64 {
        match self.tag() {
            TAG_SOME => WRAP_SOME,
            TAG_OK => WRAP_OK,
            TAG_ERR => WRAP_ERR,
            _ => panic!("wrapper_kind() called on non-wrapper"),
        }
    }

    /// For a wrapper whose inner value is stored inline (arena flag clear),
    /// return its `WRAPPER_INLINE_*` kind code; `None` otherwise.
    #[inline]
    fn wrapper_inline_kind(self) -> Option<u64> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT == 0 => {
                Some((self.payload() & WRAPPER_INLINE_KIND_MASK) >> WRAPPER_INLINE_KIND_SHIFT)
            }
            _ => None,
        }
    }

    /// Sign-extend the 43-bit inline-int payload of a wrapper
    /// (bit 42 is the sign bit).
    #[inline]
    fn decode_wrapper_inline_int_payload(payload: u64) -> i64 {
        let raw = payload & WRAPPER_INT_INLINE_MASK;
        if raw & (1u64 << 42) != 0 {
            // Negative: fill the high bits with ones.
            (raw | !WRAPPER_INT_INLINE_MASK) as i64
        } else {
            raw as i64
        }
    }

    /// Truncate `i` to the 43-bit inline wrapper payload. `i` must lie in
    /// `WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX` (debug-checked).
    #[inline]
    fn encode_wrapper_inline_int(i: i64) -> u64 {
        debug_assert!((WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&i));
        (i as u64) & WRAPPER_INT_INLINE_MASK
    }

    /// Reconstruct the inner value of an inline wrapper (immediate, small
    /// int, or `NONE`); `None` if this is not an inline wrapper.
    #[inline]
    fn wrapper_inline_inner(self) -> Option<NanValue> {
        let kind = self.wrapper_inline_kind()?;
        let payload = self.payload() & WRAPPER_INLINE_PAYLOAD_MASK;
        match kind {
            WRAPPER_INLINE_IMMEDIATE => Some(Self::encode(TAG_IMMEDIATE, payload)),
            WRAPPER_INLINE_INT => Some(Self::new_int_inline(
                Self::decode_wrapper_inline_int_payload(payload),
            )),
            WRAPPER_INLINE_NONE => Some(Self::NONE),
            _ => None,
        }
    }

    /// Build a wrapper with its inner value packed inline:
    /// bits 44..43 hold `inline_kind`, bits 42..0 hold `payload`.
    #[inline]
    fn new_inline_wrapper(tag: u64, inline_kind: u64, payload: u64) -> Self {
        debug_assert!(matches!(tag, TAG_SOME | TAG_OK | TAG_ERR));
        debug_assert!(inline_kind <= WRAPPER_INLINE_NONE);
        debug_assert!(payload <= WRAPPER_INLINE_PAYLOAD_MASK);
        Self::encode(tag, (inline_kind << WRAPPER_INLINE_KIND_SHIFT) | payload)
    }

    /// Split a wrapper into `(WRAP_* kind, inner value)`, reading the
    /// inner from the arena when it is boxed there. `None` for non-wrappers.
    #[inline]
    fn wrapper_parts(self, arena: &Arena) -> Option<(u64, NanValue)> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_SOME | TAG_OK | TAG_ERR if self.payload() & ARENA_REF_BIT != 0 => {
                Some((self.wrapper_kind(), arena.get_boxed(self.arena_index())))
            }
            TAG_SOME | TAG_OK | TAG_ERR => self
                .wrapper_inline_inner()
                .map(|inner| (self.wrapper_kind(), inner)),
            _ => None,
        }
    }
390
    /// `Some(..)` wrapping the boxed arena entry at `inner_index`.
    #[inline]
    pub fn new_some(inner_index: u32) -> Self {
        Self::encode(TAG_SOME, ARENA_REF_BIT | (inner_index as u64))
    }

    /// `Ok(..)` wrapping the boxed arena entry at `inner_index`.
    #[inline]
    pub fn new_ok(inner_index: u32) -> Self {
        Self::encode(TAG_OK, ARENA_REF_BIT | (inner_index as u64))
    }

    /// `Err(..)` wrapping the boxed arena entry at `inner_index`.
    #[inline]
    pub fn new_err(inner_index: u32) -> Self {
        Self::encode(TAG_ERR, ARENA_REF_BIT | (inner_index as u64))
    }

    /// Arena index of an arena-backed wrapper. Debug-asserts that this is
    /// a wrapper whose arena flag is set (inline wrappers have no index).
    #[inline]
    pub fn wrapper_index(self) -> u32 {
        debug_assert!(
            self.is_nan_boxed()
                && matches!(self.tag(), TAG_SOME | TAG_OK | TAG_ERR)
                && self.payload() & ARENA_REF_BIT != 0
        );
        self.arena_index()
    }

    /// Inner value of a wrapper (inline or arena-backed).
    /// Panics when called on a non-wrapper.
    #[inline]
    pub fn wrapper_inner(self, arena: &Arena) -> NanValue {
        self.wrapper_parts(arena)
            .map(|(_, inner)| inner)
            .expect("wrapper_inner() called on non-wrapper")
    }
424
425 #[inline]
426 fn wrap_value(kind: u64, inner: NanValue, arena: &mut Arena) -> Self {
427 if let Some(payload) = inner.plain_immediate_payload() {
428 let tag = match kind {
429 WRAP_SOME => TAG_SOME,
430 WRAP_OK => TAG_OK,
431 WRAP_ERR => TAG_ERR,
432 _ => unreachable!("invalid wrapper kind"),
433 };
434 Self::new_inline_wrapper(tag, WRAPPER_INLINE_IMMEDIATE, payload)
435 } else if inner.is_none() {
436 let tag = match kind {
437 WRAP_SOME => TAG_SOME,
438 WRAP_OK => TAG_OK,
439 WRAP_ERR => TAG_ERR,
440 _ => unreachable!("invalid wrapper kind"),
441 };
442 Self::new_inline_wrapper(tag, WRAPPER_INLINE_NONE, 0)
443 } else if let Some(value) = inner.inline_int_value() {
444 if (WRAPPER_INT_INLINE_MIN..=WRAPPER_INT_INLINE_MAX).contains(&value) {
445 let tag = match kind {
446 WRAP_SOME => TAG_SOME,
447 WRAP_OK => TAG_OK,
448 WRAP_ERR => TAG_ERR,
449 _ => unreachable!("invalid wrapper kind"),
450 };
451 return Self::new_inline_wrapper(
452 tag,
453 WRAPPER_INLINE_INT,
454 Self::encode_wrapper_inline_int(value),
455 );
456 }
457 let idx = arena.push_boxed(inner);
458 match kind {
459 WRAP_SOME => Self::new_some(idx),
460 WRAP_OK => Self::new_ok(idx),
461 WRAP_ERR => Self::new_err(idx),
462 _ => unreachable!("invalid wrapper kind"),
463 }
464 } else {
465 let idx = arena.push_boxed(inner);
466 match kind {
467 WRAP_SOME => Self::new_some(idx),
468 WRAP_OK => Self::new_ok(idx),
469 WRAP_ERR => Self::new_err(idx),
470 _ => unreachable!("invalid wrapper kind"),
471 }
472 }
473 }
474
475 #[inline]
476 pub fn new_some_value(inner: NanValue, arena: &mut Arena) -> Self {
477 Self::wrap_value(WRAP_SOME, inner, arena)
478 }
479
480 #[inline]
481 pub fn new_ok_value(inner: NanValue, arena: &mut Arena) -> Self {
482 Self::wrap_value(WRAP_OK, inner, arena)
483 }
484
485 #[inline]
486 pub fn new_err_value(inner: NanValue, arena: &mut Arena) -> Self {
487 Self::wrap_value(WRAP_ERR, inner, arena)
488 }
489
    /// String stored in the arena at `arena_index`.
    #[inline]
    pub fn new_string(arena_index: u32) -> Self {
        Self::encode(TAG_STRING, STRING_ARENA_BIT | (arena_index as u64))
    }

    /// Pack up to `STRING_INLINE_MAX_BYTES` UTF-8 bytes directly into the
    /// payload: length in bits 42..40, bytes little-endian in bits 39..0.
    #[inline]
    fn new_small_string_bytes(bytes: &[u8]) -> Self {
        debug_assert!(bytes.len() <= STRING_INLINE_MAX_BYTES);
        let mut payload = (bytes.len() as u64) << STRING_INLINE_LEN_SHIFT;
        for (idx, byte) in bytes.iter().enumerate() {
            payload |= (*byte as u64) << (idx * 8);
        }
        Self::encode(TAG_STRING, payload)
    }

    /// Decode an inline string. `None` for non-strings, arena-backed
    /// strings, or a malformed length field.
    #[inline]
    pub(crate) fn small_string(self) -> Option<NanString<'static>> {
        if !self.is_nan_boxed()
            || self.tag() != TAG_STRING
            || self.payload() & STRING_ARENA_BIT != 0
        {
            return None;
        }
        let payload = self.payload();
        let len = ((payload & STRING_INLINE_LEN_MASK) >> STRING_INLINE_LEN_SHIFT) as u8;
        // The 3-bit length field could encode up to 7; only 0..=5 are valid.
        if len as usize > STRING_INLINE_MAX_BYTES {
            return None;
        }
        let mut bytes = [0u8; STRING_INLINE_MAX_BYTES];
        for (idx, slot) in bytes.iter_mut().take(len as usize).enumerate() {
            *slot = ((payload >> (idx * 8)) & 0xFF) as u8;
        }
        Some(NanString::Inline { len, bytes })
    }

    /// Box a string: inline when it fits (<= 5 bytes), otherwise stored
    /// in the arena.
    #[inline]
    pub fn new_string_value(s: &str, arena: &mut Arena) -> Self {
        if s.len() <= STRING_INLINE_MAX_BYTES {
            Self::new_small_string_bytes(s.as_bytes())
        } else {
            Self::new_string(arena.push_string(s))
        }
    }
535
    /// List backed by the arena entry at `arena_index`.
    #[inline]
    pub fn new_list(arena_index: u32) -> Self {
        Self::encode(TAG_LIST, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Tuple backed by the arena entry at `arena_index`.
    #[inline]
    pub fn new_tuple(arena_index: u32) -> Self {
        Self::encode(TAG_TUPLE, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Map backed by the arena entry at `arena_index`.
    #[inline]
    pub fn new_map(arena_index: u32) -> Self {
        Self::encode(TAG_MAP, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Record backed by the arena entry at `arena_index`.
    #[inline]
    pub fn new_record(arena_index: u32) -> Self {
        Self::encode(TAG_RECORD, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Variant backed by the arena entry at `arena_index`.
    #[inline]
    pub fn new_variant(arena_index: u32) -> Self {
        Self::encode(TAG_VARIANT, ARENA_REF_BIT | (arena_index as u64))
    }

    /// Symbol value: `symbol_kind` in the low 2 payload bits, the index
    /// shifted above them.
    #[inline]
    fn new_symbol(symbol_kind: u64, symbol_index: u32) -> Self {
        Self::encode(TAG_SYMBOL, symbol_kind | ((symbol_index as u64) << 2))
    }

    /// `SYMBOL_*` kind of a symbol value (debug-asserts the tag).
    #[inline]
    pub(crate) fn symbol_kind(self) -> u64 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        self.payload() & SYMBOL_KIND_MASK
    }

    /// Symbol-table index of a symbol value (debug-asserts the tag).
    #[inline]
    pub(crate) fn symbol_index(self) -> u32 {
        debug_assert!(self.is_nan_boxed() && self.tag() == TAG_SYMBOL);
        (self.payload() >> 2) as u32
    }

    /// Payload-less enum variant, stored as a symbol (no arena entry).
    #[inline]
    pub fn new_nullary_variant(symbol_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NULLARY_VARIANT, symbol_index)
    }

    /// User-defined function symbol.
    #[inline]
    pub fn new_fn(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_FN, arena_index)
    }

    /// Builtin function symbol.
    #[inline]
    pub fn new_builtin(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_BUILTIN, arena_index)
    }

    /// Namespace symbol.
    #[inline]
    pub fn new_namespace(arena_index: u32) -> Self {
        Self::new_symbol(SYMBOL_NAMESPACE, arena_index)
    }
597
    /// Arena index encoded in the payload (arena flag bit stripped).
    /// Only meaningful for arena-backed values.
    #[inline]
    pub fn arena_index(self) -> u32 {
        (self.payload() & !ARENA_REF_BIT) as u32
    }

    /// Heap (arena) index this value references, if any. Returns `None`
    /// for floats, immediates, symbols, and values stored inline.
    #[inline]
    pub fn heap_index(self) -> Option<u32> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_INT => {
                let p = self.payload();
                if p & INT_BIG_BIT != 0 {
                    Some((p & !INT_BIG_BIT) as u32)
                } else {
                    None
                }
            }
            TAG_STRING | TAG_SOME | TAG_OK | TAG_ERR | TAG_LIST | TAG_TUPLE | TAG_MAP
            | TAG_RECORD | TAG_VARIANT => {
                (self.payload() & ARENA_REF_BIT != 0).then_some(self.arena_index())
            }
            _ => None,
        }
    }

    /// Re-point an arena-backed value at a new heap index, keeping its
    /// tag; values that carry no heap index are returned unchanged.
    /// NOTE(review): presumably used by the collector in the `memory`
    /// submodule to forward moved entries — confirm there.
    #[inline]
    pub fn with_heap_index(self, index: u32) -> Self {
        if !self.is_nan_boxed() {
            return self;
        }
        match self.tag() {
            TAG_INT => {
                // Inline ints have no index to rewrite.
                debug_assert!(self.payload() & INT_BIG_BIT != 0);
                Self::new_int_arena(index)
            }
            TAG_SOME => Self::new_some(index),
            TAG_OK => Self::new_ok(index),
            TAG_ERR => Self::new_err(index),
            TAG_STRING => Self::new_string(index),
            TAG_LIST => Self::new_list(index),
            TAG_TUPLE => Self::new_tuple(index),
            TAG_MAP => Self::new_map(index),
            TAG_RECORD => Self::new_record(index),
            TAG_VARIANT => Self::new_variant(index),
            _ => self,
        }
    }
647
648 #[inline]
651 pub fn is_float(self) -> bool {
652 !self.is_nan_boxed()
653 }
654
655 #[inline]
656 pub fn is_int(self) -> bool {
657 self.is_nan_boxed() && self.tag() == TAG_INT
658 }
659
660 #[inline]
661 pub fn is_bool(self) -> bool {
662 self.is_nan_boxed()
663 && self.tag() == TAG_IMMEDIATE
664 && (self.payload() == IMM_TRUE || self.payload() == IMM_FALSE)
665 }
666
667 #[inline]
668 pub fn is_unit(self) -> bool {
669 self.0 == Self::UNIT.0
670 }
671
672 #[inline]
673 pub fn is_none(self) -> bool {
674 self.0 == Self::NONE.0
675 }
676
677 #[inline]
678 pub fn is_some(self) -> bool {
679 self.is_nan_boxed() && self.tag() == TAG_SOME
680 }
681
682 #[inline]
683 pub fn is_ok(self) -> bool {
684 self.is_nan_boxed() && self.tag() == TAG_OK
685 }
686
687 #[inline]
688 pub fn is_err(self) -> bool {
689 self.is_nan_boxed() && self.tag() == TAG_ERR
690 }
691
692 #[inline]
693 pub fn is_string(self) -> bool {
694 self.is_nan_boxed() && self.tag() == TAG_STRING
695 }
696
697 #[inline]
698 pub fn is_list(self) -> bool {
699 self.is_nan_boxed() && self.tag() == TAG_LIST
700 }
701
702 #[inline]
703 pub fn is_record(self) -> bool {
704 self.is_nan_boxed() && self.tag() == TAG_RECORD
705 }
706
707 #[inline]
708 pub fn is_fn(self) -> bool {
709 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_FN
710 }
711
712 #[inline]
713 pub fn is_variant(self) -> bool {
714 self.is_nan_boxed()
715 && (self.tag() == TAG_VARIANT
716 || (self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NULLARY_VARIANT))
717 }
718
719 #[inline]
720 pub fn is_map(self) -> bool {
721 self.is_nan_boxed() && self.tag() == TAG_MAP
722 }
723
724 #[inline]
725 pub fn is_tuple(self) -> bool {
726 self.is_nan_boxed() && self.tag() == TAG_TUPLE
727 }
728
729 #[inline]
730 pub fn is_builtin(self) -> bool {
731 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_BUILTIN
732 }
733
734 #[inline]
735 pub fn is_namespace(self) -> bool {
736 self.is_nan_boxed() && self.tag() == TAG_SYMBOL && self.symbol_kind() == SYMBOL_NAMESPACE
737 }
738
739 #[inline]
740 pub fn is_empty_list_immediate(self) -> bool {
741 self.is_nan_boxed() && self.tag() == TAG_LIST && self.payload() & ARENA_REF_BIT == 0
742 }
743
744 #[inline]
745 pub fn is_empty_map_immediate(self) -> bool {
746 self.is_nan_boxed() && self.tag() == TAG_MAP && self.payload() & ARENA_REF_BIT == 0
747 }
748
749 pub fn type_name(self) -> &'static str {
750 if self.is_float() {
751 return "Float";
752 }
753 match self.tag() {
754 TAG_INT => "Int",
755 TAG_IMMEDIATE => match self.payload() {
756 IMM_FALSE | IMM_TRUE => "Bool",
757 IMM_UNIT => "Unit",
758 _ => "Unknown",
759 },
760 TAG_SOME => "Option.Some",
761 TAG_NONE => "Option.None",
762 TAG_OK => "Result.Ok",
763 TAG_ERR => "Result.Err",
764 TAG_STRING => "String",
765 TAG_LIST => "List",
766 TAG_TUPLE => "Tuple",
767 TAG_MAP => "Map",
768 TAG_RECORD => "Record",
769 TAG_VARIANT => "Variant",
770 TAG_SYMBOL => match self.symbol_kind() {
771 SYMBOL_FN => "Fn",
772 SYMBOL_BUILTIN => "Builtin",
773 SYMBOL_NAMESPACE => "Namespace",
774 SYMBOL_NULLARY_VARIANT => "Variant",
775 _ => "Unknown",
776 },
777 _ => "Unknown",
778 }
779 }
780
    /// Constructor id of a variant value (arena-backed or nullary symbol);
    /// `None` for non-variants.
    #[inline]
    pub fn variant_ctor_id(self, arena: &Arena) -> Option<u32> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_VARIANT => {
                let (type_id, variant_id, _) = arena.get_variant(self.arena_index());
                arena.find_ctor_id(type_id, variant_id)
            }
            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
                Some(arena.get_nullary_variant_ctor(self.symbol_index()))
            }
            _ => None,
        }
    }

    /// `(type_id, variant_id, fields)` of a variant value; nullary
    /// variants report an empty field slice. `None` for non-variants.
    #[inline]
    pub fn variant_parts(self, arena: &Arena) -> Option<(u32, u16, &[NanValue])> {
        if !self.is_nan_boxed() {
            return None;
        }
        match self.tag() {
            TAG_VARIANT => {
                let (type_id, variant_id, fields) = arena.get_variant(self.arena_index());
                Some((type_id, variant_id, fields))
            }
            TAG_SYMBOL if self.symbol_kind() == SYMBOL_NULLARY_VARIANT => {
                // Resolve the (type, variant) pair through the ctor table.
                let (type_id, variant_id) =
                    arena.get_ctor_parts(arena.get_nullary_variant_ctor(self.symbol_index()));
                Some((type_id, variant_id, &[]))
            }
            _ => None,
        }
    }
816
    /// Raw bit pattern (for transport / direct hashing).
    #[inline]
    pub fn bits(self) -> u64 {
        self.0
    }

    /// Rebuild a value from bits previously obtained via `bits()`.
    #[inline]
    pub fn from_bits(bits: u64) -> Self {
        NanValue(bits)
    }

    /// Key hash used by `PersistentMap`.
    ///
    /// Strings hash by content so that an inline small string and an
    /// arena-backed string with equal text produce the same key; all
    /// other values use their raw bit pattern directly.
    /// NOTE(review): the `3u8` domain separator equals `TAG_STRING`;
    /// confirm that correspondence is intentional before changing either.
    pub fn map_key_hash(self, arena: &Arena) -> u64 {
        if self.is_string() {
            use std::hash::{Hash, Hasher};
            let mut hasher = std::collections::hash_map::DefaultHasher::new();
            3u8.hash(&mut hasher);
            arena.get_string_value(self).hash(&mut hasher);
            hasher.finish()
        } else {
            self.bits()
        }
    }
}
844
impl std::fmt::Debug for NanValue {
    /// Structural debug output. Arena-backed values print only their
    /// arena index, since no `Arena` is available to resolve contents.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.is_float() {
            return write!(f, "Float({})", self.as_float());
        }
        match self.tag() {
            TAG_INT => {
                if self.payload() & INT_BIG_BIT != 0 {
                    write!(f, "Int(arena:{})", (self.payload() & !INT_BIG_BIT) as u32)
                } else {
                    write!(
                        f,
                        "Int({})",
                        Self::decode_inline_int_payload(self.payload())
                    )
                }
            }
            TAG_IMMEDIATE => match self.payload() {
                IMM_FALSE => write!(f, "False"),
                IMM_TRUE => write!(f, "True"),
                IMM_UNIT => write!(f, "Unit"),
                _ => write!(f, "Immediate({})", self.payload()),
            },
            TAG_NONE => write!(f, "None"),
            TAG_SOME | TAG_OK | TAG_ERR => {
                let kind = match self.tag() {
                    TAG_SOME => "Some",
                    TAG_OK => "Ok",
                    TAG_ERR => "Err",
                    _ => "?",
                };
                // Inline wrappers can recurse into their inner value;
                // arena-backed ones only show the index.
                if self.payload() & ARENA_REF_BIT != 0 {
                    write!(f, "{}(arena:{})", kind, self.arena_index())
                } else if let Some(inner) = self.wrapper_inline_inner() {
                    write!(f, "{}({:?})", kind, inner)
                } else {
                    write!(f, "{}(?)", kind)
                }
            }
            TAG_SYMBOL => match self.symbol_kind() {
                SYMBOL_FN => write!(f, "Fn(symbol:{})", self.symbol_index()),
                SYMBOL_BUILTIN => write!(f, "Builtin(symbol:{})", self.symbol_index()),
                SYMBOL_NAMESPACE => write!(f, "Namespace(symbol:{})", self.symbol_index()),
                SYMBOL_NULLARY_VARIANT => {
                    write!(f, "NullaryVariant(symbol:{})", self.symbol_index())
                }
                _ => write!(f, "Symbol({})", self.payload()),
            },
            TAG_STRING => {
                if let Some(s) = self.small_string() {
                    write!(f, "String({:?})", s.as_str())
                } else {
                    write!(f, "String(arena:{})", self.arena_index())
                }
            }
            TAG_LIST if self.is_empty_list_immediate() => write!(f, "EmptyList"),
            TAG_MAP if self.is_empty_map_immediate() => write!(f, "EmptyMap"),
            _ => write!(f, "{}(arena:{})", self.type_name(), self.arena_index()),
        }
    }
}
908
/// Value heap backing `NanValue` arena indices, plus the runtime type
/// metadata tables.
///
/// Entries live in one of four spaces (young / yard / handoff / stable).
/// NOTE(review): the semantics of the spaces, the `scratch_*` buffers and
/// `peak_usage` are defined by the collector in the `memory` submodule,
/// outside this view — confirm details there.
#[derive(Debug, Clone)]
pub struct Arena {
    young_entries: Vec<ArenaEntry>,
    yard_entries: Vec<ArenaEntry>,
    handoff_entries: Vec<ArenaEntry>,
    stable_entries: Vec<ArenaEntry>,
    // Per-space scratch index buffers.
    scratch_young: Vec<u32>,
    scratch_yard: Vec<u32>,
    scratch_handoff: Vec<u32>,
    scratch_stable: Vec<u32>,
    // High-water mark of per-space entry counts.
    peak_usage: ArenaUsage,
    // Space new allocations currently target.
    alloc_space: AllocSpace,
    // Type metadata: names, record field names, variant names, and the
    // mapping between constructor ids and (type_id, variant_id) pairs.
    pub(crate) type_names: Vec<String>,
    pub(crate) type_field_names: Vec<Vec<String>>,
    pub(crate) type_variant_names: Vec<Vec<String>>,
    pub(crate) type_variant_ctor_ids: Vec<Vec<u32>>,
    pub(crate) ctor_to_type_variant: Vec<(u32, u16)>,
    // Symbol table referenced by TAG_SYMBOL values.
    pub(crate) symbol_entries: Vec<ArenaSymbol>,
}
932
/// A heap cell in the `Arena`. Each variant backs one kind of
/// arena-referencing `NanValue`.
#[derive(Debug, Clone)]
pub enum ArenaEntry {
    /// Integer too wide for the 45-bit inline encoding.
    Int(i64),
    /// String too long for the 5-byte inline encoding.
    String(Rc<str>),
    /// List node (see `ArenaList` for the representations).
    List(ArenaList),
    Tuple(Vec<NanValue>),
    Map(PersistentMap),
    /// Record instance; `type_id` indexes the `Arena` type tables.
    Record {
        type_id: u32,
        fields: Vec<NanValue>,
    },
    /// Enum variant instance with payload fields.
    Variant {
        type_id: u32,
        variant_id: u16,
        fields: Vec<NanValue>,
    },
    Fn(Rc<FunctionValue>),
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Inner value of a `Some`/`Ok`/`Err` wrapper that could not be
    /// packed inline (see `NanValue::wrap_value`).
    Boxed(NanValue),
}
957
/// Entry in the symbol table (`Arena::symbol_entries`) referenced by
/// `TAG_SYMBOL` values via `NanValue::symbol_index`.
#[derive(Debug, Clone)]
pub enum ArenaSymbol {
    Fn(Rc<FunctionValue>),
    /// Builtin identified by name.
    Builtin(Rc<str>),
    Namespace {
        name: Rc<str>,
        members: Vec<(Rc<str>, NanValue)>,
    },
    /// Payload-less enum variant, identified by its constructor id.
    NullaryVariant {
        ctor_id: u32,
    },
}
970
/// List representations. Lists are structure-shared: prepend and concat
/// build small nodes that reference other list values instead of copying.
/// NOTE(review): field semantics below are partly inferred; the traversal
/// and flattening logic lives in the `lists` submodule — confirm there.
#[derive(Debug, Clone)]
pub enum ArenaList {
    /// Contiguous elements shared via `Rc`; `start` skips a prefix so
    /// tails can reuse the same backing vector.
    Flat {
        items: Rc<Vec<NanValue>>,
        start: usize,
    },
    /// `head` consed onto the list value `tail`; `len` is the total length.
    Prepend {
        head: NanValue,
        tail: NanValue,
        len: usize,
    },
    /// Concatenation of two list values; `len` is the total length.
    Concat {
        left: NanValue,
        right: NanValue,
        len: usize,
    },
    /// A current segment plus further segments held in `rest`.
    Segments {
        current: NanValue,
        rest: Rc<Vec<NanValue>>,
        start: usize,
        len: usize,
    },
}
994
995const LIST_APPEND_CHUNK_LIMIT: usize = 128;
996
/// The four arena spaces. Discriminants are pinned to 0..=3, presumably
/// so a space fits the two `HEAP_SPACE_MASK_U32` bits of a packed heap
/// handle — confirm in the `memory` submodule.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum HeapSpace {
    Young = 0,
    Yard = 1,
    Handoff = 2,
    Stable = 3,
}

/// Spaces a new allocation may target. `Stable` is deliberately absent:
/// nothing allocates directly into the stable space.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocSpace {
    Young,
    Yard,
    Handoff,
}
1011
/// Entry counts per arena space (see `Arena::peak_usage`).
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct ArenaUsage {
    pub young: usize,
    pub yard: usize,
    pub handoff: usize,
    pub stable: usize,
}
1019
1020impl ArenaUsage {
1021 pub fn total(self) -> usize {
1022 self.young + self.yard + self.handoff + self.stable
1023 }
1024}
1025
// A heap handle packs (space, index) into one u32: the two bits at
// `HEAP_SPACE_SHIFT` select the `HeapSpace`, the low 30 bits are the
// index within that space.
const HEAP_SPACE_SHIFT: u32 = 30;
const HEAP_SPACE_MASK_U32: u32 = 0b11 << HEAP_SPACE_SHIFT;
const HEAP_INDEX_MASK_U32: u32 = (1 << HEAP_SPACE_SHIFT) - 1;
1029
// Remaining implementation lives in sibling submodules.
mod arena;
mod compare;
mod convert;
mod lists;
mod memory;

#[cfg(test)]
#[allow(clippy::approx_constant)]
mod tests;