use std::alloc::{Layout, alloc, dealloc, realloc};

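/// Stack value used when the `nanbox` feature is disabled: five 64-bit slots,
/// with `slot0` holding the tag/discriminant word and `slot1`..`slot4` holding
/// the payload (see the `Debug` impl below for the known discriminants).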
#[cfg(not(feature = "nanbox"))]
#[repr(C)]
#[derive(Clone, Copy, Default)]
pub struct StackValue {
    pub slot0: u64,
    pub slot1: u64,
    pub slot2: u64,
    pub slot3: u64,
    pub slot4: u64,
}

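/// Stack value used when the `nanbox` feature is enabled: a single NaN-boxed
/// 64-bit word.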
#[cfg(feature = "nanbox")]
#[repr(transparent)]
#[derive(Clone, Copy, Default)]
pub struct StackValue(pub crate::nanbox::NanBoxedValue);

#[cfg(not(feature = "nanbox"))]
impl std::fmt::Debug for StackValue {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.slot0 == 0 {
            write!(f, "Int({})", self.slot1 as i64)
        } else if self.slot0 == 2 {
            write!(f, "Bool({})", self.slot1 != 0)
        } else {
            write!(
                f,
                "StackValue {{ slot0: 0x{:x}, slot1: 0x{:x}, slot2: 0x{:x}, slot3: 0x{:x}, slot4: 0x{:x} }}",
                self.slot0, self.slot1, self.slot2, self.slot3, self.slot4
            )
        }
    }
}

#[cfg(feature = "nanbox")]
impl std::fmt::Debug for StackValue {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "StackValue({:?})", self.0)
    }
}

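/// Size of a `StackValue` in bytes; checked at compile time below.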
pub const STACK_VALUE_SIZE: usize = std::mem::size_of::<StackValue>();

#[cfg(not(feature = "nanbox"))]
const _: () = assert!(
    STACK_VALUE_SIZE == 40,
    "StackValue must be 40 bytes without nanbox"
);

#[cfg(feature = "nanbox")]
const _: () = assert!(
    STACK_VALUE_SIZE == 8,
    "StackValue must be 8 bytes with nanbox"
);

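/// Pointer-tagged 64-bit value: the low bit distinguishes inline integers
/// (`TAG_INT`) from heap pointers (`TAG_HEAP`), which works because heap
/// objects are 8-byte aligned.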
pub type TaggedValue = u64;

pub const TAG_MASK: u64 = 1;

pub const TAG_INT: u64 = 1;

pub const TAG_HEAP: u64 = 0;

#[inline(always)]
pub const fn is_tagged_int(val: TaggedValue) -> bool {
    (val & TAG_MASK) == TAG_INT
}

#[inline(always)]
pub const fn is_tagged_heap(val: TaggedValue) -> bool {
    (val & TAG_MASK) == TAG_HEAP
}

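/// Tags an `i64` as an inline integer: shift left one bit and set `TAG_INT`.
/// Only values that fit in 63 bits round-trip through `untag_int`.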
#[inline(always)]
pub const fn tag_int(val: i64) -> TaggedValue {
    ((val << 1) as u64) | TAG_INT
}

#[inline(always)]
pub const fn untag_int(val: TaggedValue) -> i64 {
    (val as i64) >> 1
}

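/// Tags a heap pointer. The pointer must be 8-byte aligned so that its low
/// bit is already clear (`TAG_HEAP`).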
#[inline(always)]
pub fn tag_heap_ptr(ptr: *mut HeapObject) -> TaggedValue {
    debug_assert!(
        (ptr as usize) & 0x7 == 0,
        "HeapObject pointer must be 8-byte aligned"
    );
    ptr as TaggedValue
}

#[inline(always)]
pub fn untag_heap_ptr(val: TaggedValue) -> *mut HeapObject {
    val as *mut HeapObject
}

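/// Type tag stored in the first byte of every heap object header.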
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HeapTag {
    Float = 1,
    Bool = 2,
    String = 3,
    Variant = 4,
    Map = 5,
    Quotation = 6,
    Closure = 7,
}

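/// Common header shared by all heap objects: type tag, flag bits, reserved
/// padding, and a 32-bit reference count.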
#[repr(C)]
pub struct HeapObject {
    pub tag: u8,
    pub flags: u8,
    pub reserved: u16,
    pub refcount: u32,
}

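/// Flag bits stored in `HeapObject::flags`. `IS_STATIC` marks statically
/// allocated objects, which `seq_heap_incref`/`seq_heap_decref` leave untouched.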
pub mod heap_flags {
    pub const IS_STATIC: u8 = 0x01;
}

impl HeapObject {
    #[inline(always)]
    pub fn is_static(&self) -> bool {
        self.flags & heap_flags::IS_STATIC != 0
    }
}

#[repr(C)]
pub struct FloatObject {
    pub header: HeapObject,
    pub value: f64,
}

#[repr(C)]
pub struct BoolObject {
    pub header: HeapObject,
    pub value: bool,
}

#[repr(C)]
pub struct QuotationObject {
    pub header: HeapObject,
    pub wrapper: usize,
    pub impl_ptr: usize,
}

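/// Default stack capacity, in `StackValue` slots.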
pub const DEFAULT_STACK_CAPACITY: usize = 4096;

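/// Growable operand stack backed by a raw allocation. `sp` is the number of
/// live slots and `capacity` is the allocated slot count. The struct is
/// `#[repr(C)]` and its fields are exposed through the `seq_stack_*` FFI
/// accessors below.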
#[repr(C)]
pub struct TaggedStack {
    pub base: *mut StackValue,
    pub sp: usize,
    pub capacity: usize,
}

impl TaggedStack {
    pub fn new(capacity: usize) -> Self {
        let layout = Layout::array::<StackValue>(capacity).expect("stack layout overflow");
        let base = unsafe { alloc(layout) as *mut StackValue };
        if base.is_null() {
            panic!("Failed to allocate tagged stack");
        }

        TaggedStack {
            base,
            sp: 0,
            capacity,
        }
    }

    pub fn with_default_capacity() -> Self {
        Self::new(DEFAULT_STACK_CAPACITY)
    }

    #[inline(always)]
    pub fn depth(&self) -> usize {
        self.sp
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.sp == 0
    }

    #[inline(always)]
    pub fn has_capacity(&self, n: usize) -> bool {
        self.sp + n <= self.capacity
    }

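    /// Grows the backing allocation to hold at least `min_capacity` slots,
    /// at least doubling the current capacity.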
    pub fn grow(&mut self, min_capacity: usize) {
        let new_capacity = (self.capacity * 2).max(min_capacity);
        let old_layout = Layout::array::<StackValue>(self.capacity).expect("old layout overflow");
        let new_layout = Layout::array::<StackValue>(new_capacity).expect("new layout overflow");

        let new_base = unsafe {
            realloc(self.base as *mut u8, old_layout, new_layout.size()) as *mut StackValue
        };

        if new_base.is_null() {
            panic!(
                "Failed to grow tagged stack from {} to {}",
                self.capacity, new_capacity
            );
        }

        self.base = new_base;
        self.capacity = new_capacity;
    }

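    /// Pushes a value, growing the stack first if it is full.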
    #[inline]
    pub fn push(&mut self, val: StackValue) {
        if self.sp >= self.capacity {
            self.grow(self.capacity + 1);
        }
        unsafe {
            *self.base.add(self.sp) = val;
        }
        self.sp += 1;
    }

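    /// Pops and returns the top value; panics if the stack is empty.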
    #[inline]
    pub fn pop(&mut self) -> StackValue {
        assert!(self.sp > 0, "pop: stack is empty");
        self.sp -= 1;
        unsafe { *self.base.add(self.sp) }
    }

    #[inline]
    pub fn peek(&self) -> StackValue {
        assert!(self.sp > 0, "peek: stack is empty");
        unsafe { *self.base.add(self.sp - 1) }
    }

    #[inline(always)]
    pub fn sp_ptr(&self) -> *mut StackValue {
        unsafe { self.base.add(self.sp) }
    }

    #[cfg(not(feature = "nanbox"))]
    #[inline]
    pub fn push_int(&mut self, val: i64) {
        self.push(StackValue {
            slot0: 0,
            slot1: val as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        });
    }

    #[cfg(feature = "nanbox")]
    #[inline]
    pub fn push_int(&mut self, val: i64) {
        self.push(StackValue(crate::nanbox::NanBoxedValue::from_int(val)));
    }

    #[cfg(not(feature = "nanbox"))]
    #[inline]
    pub fn pop_int(&mut self) -> i64 {
        let val = self.pop();
        assert!(
            val.slot0 == 0,
            "pop_int: expected Int (discriminant 0), got discriminant {}",
            val.slot0
        );
        val.slot1 as i64
    }

    #[cfg(feature = "nanbox")]
    #[inline]
    pub fn pop_int(&mut self) -> i64 {
        let val = self.pop();
        assert!(val.0.is_int(), "pop_int: expected Int");
        val.0.as_int()
    }

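    /// Deep-clones the stack, cloning each live value via `clone_stack_value`.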
    pub fn clone_stack(&self) -> Self {
        use crate::stack::clone_stack_value;

        let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
        let new_base = unsafe { alloc(layout) as *mut StackValue };
        if new_base.is_null() {
            panic!("Failed to allocate cloned stack");
        }

        for i in 0..self.sp {
            unsafe {
                let sv = &*self.base.add(i);
                let cloned = clone_stack_value(sv);
                *new_base.add(i) = cloned;
            }
        }

        TaggedStack {
            base: new_base,
            sp: self.sp,
            capacity: self.capacity,
        }
    }
}

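/// Dropping the stack releases each remaining value, then the backing allocation.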
impl Drop for TaggedStack {
    fn drop(&mut self) {
        use crate::stack::drop_stack_value;

        for i in 0..self.sp {
            unsafe {
                let sv = *self.base.add(i);
                drop_stack_value(sv);
            }
        }

        if !self.base.is_null() {
            let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
            unsafe {
                dealloc(self.base as *mut u8, layout);
            }
        }
    }
}

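/// FFI constructor: heap-allocates a `TaggedStack` with the given capacity and
/// transfers ownership of the raw pointer to the caller (free with `seq_stack_free`).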
#[unsafe(no_mangle)]
pub extern "C" fn seq_stack_new(capacity: usize) -> *mut TaggedStack {
    let stack = Box::new(TaggedStack::new(capacity));
    Box::into_raw(stack)
}

#[unsafe(no_mangle)]
pub extern "C" fn seq_stack_new_default() -> *mut TaggedStack {
    let stack = Box::new(TaggedStack::with_default_capacity());
    Box::into_raw(stack)
}

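/// Frees a stack created by `seq_stack_new` or `seq_stack_new_default`.
///
/// # Safety
/// `stack` must be null or an owning pointer returned by one of the
/// constructors, and must not be used after this call.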
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_free(stack: *mut TaggedStack) {
    if !stack.is_null() {
        unsafe {
            drop(Box::from_raw(stack));
        }
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_grow(stack: *mut TaggedStack, min_capacity: usize) {
    assert!(!stack.is_null(), "seq_stack_grow: null stack");
    unsafe {
        (*stack).grow(min_capacity);
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_base(stack: *mut TaggedStack) -> *mut StackValue {
    assert!(!stack.is_null(), "seq_stack_base: null stack");
    unsafe { (*stack).base }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_sp(stack: *mut TaggedStack) -> usize {
    assert!(!stack.is_null(), "seq_stack_sp: null stack");
    unsafe { (*stack).sp }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_set_sp(stack: *mut TaggedStack, new_sp: usize) {
    assert!(!stack.is_null(), "seq_stack_set_sp: null stack");
    unsafe {
        assert!(
            new_sp <= (*stack).capacity,
            "seq_stack_set_sp: sp exceeds capacity"
        );
        (*stack).sp = new_sp;
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_capacity(stack: *mut TaggedStack) -> usize {
    assert!(!stack.is_null(), "seq_stack_capacity: null stack");
    unsafe { (*stack).capacity }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_clone(stack: *mut TaggedStack) -> *mut TaggedStack {
    assert!(!stack.is_null(), "seq_stack_clone: null stack");
    let cloned = unsafe { (*stack).clone_stack() };
    Box::into_raw(Box::new(cloned))
}

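/// Allocates a `FloatObject` with refcount 1 and returns it as a `*mut HeapObject`.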
#[unsafe(no_mangle)]
pub extern "C" fn seq_alloc_float(value: f64) -> *mut HeapObject {
    let layout = Layout::new::<FloatObject>();
    let ptr = unsafe { alloc(layout) as *mut FloatObject };
    if ptr.is_null() {
        panic!("Failed to allocate FloatObject");
    }

    unsafe {
        (*ptr).header = HeapObject {
            tag: HeapTag::Float as u8,
            flags: 0,
            reserved: 0,
            refcount: 1,
        };
        (*ptr).value = value;
    }

    ptr as *mut HeapObject
}

#[unsafe(no_mangle)]
pub extern "C" fn seq_alloc_bool(value: bool) -> *mut HeapObject {
    let layout = Layout::new::<BoolObject>();
    let ptr = unsafe { alloc(layout) as *mut BoolObject };
    if ptr.is_null() {
        panic!("Failed to allocate BoolObject");
    }

    unsafe {
        (*ptr).header = HeapObject {
            tag: HeapTag::Bool as u8,
            flags: 0,
            reserved: 0,
            refcount: 1,
        };
        (*ptr).value = value;
    }

    ptr as *mut HeapObject
}

#[unsafe(no_mangle)]
pub extern "C" fn seq_alloc_quotation(wrapper: usize, impl_ptr: usize) -> *mut HeapObject {
    let layout = Layout::new::<QuotationObject>();
    let ptr = unsafe { alloc(layout) as *mut QuotationObject };
    if ptr.is_null() {
        panic!("Failed to allocate QuotationObject");
    }

    unsafe {
        (*ptr).header = HeapObject {
            tag: HeapTag::Quotation as u8,
            flags: 0,
            reserved: 0,
            refcount: 1,
        };
        (*ptr).wrapper = wrapper;
        (*ptr).impl_ptr = impl_ptr;
    }

    ptr as *mut HeapObject
}

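/// Frees a heap object, choosing the deallocation layout from its tag.
///
/// # Safety
/// `obj` must be null or a pointer to a live heap object allocated by this
/// runtime, and must not be used after this call.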
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_free_heap_object(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }

    unsafe {
        let tag = (*obj).tag;
        match tag {
            t if t == HeapTag::Float as u8 => {
                let layout = Layout::new::<FloatObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Bool as u8 => {
                let layout = Layout::new::<BoolObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Quotation as u8 => {
                let layout = Layout::new::<QuotationObject>();
                dealloc(obj as *mut u8, layout);
            }
            _ => {
                let layout = Layout::new::<HeapObject>();
                dealloc(obj as *mut u8, layout);
            }
        }
    }
}

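/// Atomically increments the refcount; no-op for null or static objects.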
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_heap_incref(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }
    unsafe {
        if !(*obj).is_static() {
            let rc = &(*obj).refcount as *const u32 as *mut u32;
            std::sync::atomic::AtomicU32::from_ptr(rc)
                .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        }
    }
}

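/// Atomically decrements the refcount and frees the object when it reaches
/// zero; no-op for null or static objects.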
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_heap_decref(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }
    unsafe {
        if !(*obj).is_static() {
            let rc = &(*obj).refcount as *const u32 as *mut u32;
            let old = std::sync::atomic::AtomicU32::from_ptr(rc)
                .fetch_sub(1, std::sync::atomic::Ordering::AcqRel);
            if old == 1 {
                seq_free_heap_object(obj);
            }
        }
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_get_float(obj: *mut HeapObject) -> f64 {
    assert!(!obj.is_null(), "seq_get_float: null object");
    unsafe {
        assert!(
            (*obj).tag == HeapTag::Float as u8,
            "seq_get_float: not a float"
        );
        (*(obj as *mut FloatObject)).value
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_get_bool(obj: *mut HeapObject) -> bool {
    assert!(!obj.is_null(), "seq_get_bool: null object");
    unsafe {
        assert!(
            (*obj).tag == HeapTag::Bool as u8,
            "seq_get_bool: not a bool"
        );
        (*(obj as *mut BoolObject)).value
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_tag_untag_int() {
        assert_eq!(untag_int(tag_int(0)), 0);
        assert_eq!(untag_int(tag_int(1)), 1);
        assert_eq!(untag_int(tag_int(42)), 42);
        assert_eq!(untag_int(tag_int(1000000)), 1000000);

        assert_eq!(untag_int(tag_int(-1)), -1);
        assert_eq!(untag_int(tag_int(-42)), -42);
        assert_eq!(untag_int(tag_int(-1000000)), -1000000);

        let max_63bit = (1i64 << 62) - 1;
        let min_63bit = -(1i64 << 62);
        assert_eq!(untag_int(tag_int(max_63bit)), max_63bit);
        assert_eq!(untag_int(tag_int(min_63bit)), min_63bit);
    }

    #[test]
    fn test_is_tagged() {
        assert!(is_tagged_int(tag_int(0)));
        assert!(is_tagged_int(tag_int(42)));
        assert!(is_tagged_int(tag_int(-1)));

        let fake_ptr = 0x1000u64;
        assert!(is_tagged_heap(fake_ptr));
        assert!(!is_tagged_int(fake_ptr));
    }

    #[test]
    fn test_tagged_int_examples() {
        assert_eq!(tag_int(42), 85);
        assert_eq!(tag_int(42), 0x55);

        assert_eq!(tag_int(-1), 0xFFFFFFFFFFFFFFFFu64);
    }

    #[test]
    fn test_stack_basic_operations() {
        let mut stack = TaggedStack::new(16);

        assert!(stack.is_empty());
        assert_eq!(stack.depth(), 0);

        stack.push_int(10);
        stack.push_int(20);
        stack.push_int(30);

        assert!(!stack.is_empty());
        assert_eq!(stack.depth(), 3);

        assert_eq!(stack.pop_int(), 30);
        assert_eq!(stack.pop_int(), 20);
        assert_eq!(stack.pop_int(), 10);

        assert!(stack.is_empty());
    }

    #[test]
    #[cfg(not(feature = "nanbox"))]
    fn test_stack_peek() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(42);

        let peeked = stack.peek();
        assert_eq!(peeked.slot0, 0);
        assert_eq!(peeked.slot1 as i64, 42);
        assert_eq!(stack.depth(), 1);
        assert_eq!(stack.pop_int(), 42);
        assert!(stack.is_empty());
    }

    #[test]
    #[cfg(feature = "nanbox")]
    fn test_stack_peek_nanbox() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(42);

        let peeked = stack.peek();
        assert!(peeked.0.is_int());
        assert_eq!(peeked.0.as_int(), 42);
        assert_eq!(stack.depth(), 1);
        assert_eq!(stack.pop_int(), 42);
        assert!(stack.is_empty());
    }

    #[test]
    fn test_stack_grow() {
        let mut stack = TaggedStack::new(4);

        for i in 0..100 {
            stack.push_int(i);
        }

        assert_eq!(stack.depth(), 100);
        assert!(stack.capacity >= 100);

        for i in (0..100).rev() {
            assert_eq!(stack.pop_int(), i);
        }
    }

    #[test]
    fn test_stack_clone() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(1);
        stack.push_int(2);
        stack.push_int(3);

        let mut cloned = stack.clone_stack();

        assert_eq!(cloned.pop_int(), 3);
        assert_eq!(cloned.pop_int(), 2);
        assert_eq!(cloned.pop_int(), 1);

        assert_eq!(stack.pop_int(), 3);
        assert_eq!(stack.pop_int(), 2);
        assert_eq!(stack.pop_int(), 1);
    }

    #[test]
    fn test_ffi_stack_new_free() {
        let stack = seq_stack_new(64);
        assert!(!stack.is_null());

        unsafe {
            assert_eq!(seq_stack_capacity(stack), 64);
            assert_eq!(seq_stack_sp(stack), 0);

            seq_stack_free(stack);
        }
    }

    #[test]
    fn test_float_object() {
        let obj = seq_alloc_float(2.5);
        assert!(!obj.is_null());

        unsafe {
            assert_eq!((*obj).tag, HeapTag::Float as u8);
            assert_eq!((*obj).refcount, 1);
            assert_eq!(seq_get_float(obj), 2.5);

            assert!((obj as usize) & 0x7 == 0);

            seq_free_heap_object(obj);
        }
    }

    #[test]
    fn test_bool_object() {
        let obj_true = seq_alloc_bool(true);
        let obj_false = seq_alloc_bool(false);

        unsafe {
            assert!(seq_get_bool(obj_true));
            assert!(!seq_get_bool(obj_false));

            seq_free_heap_object(obj_true);
            seq_free_heap_object(obj_false);
        }
    }

    #[test]
    fn test_refcount() {
        let obj = seq_alloc_float(1.0);

        unsafe {
            assert_eq!((*obj).refcount, 1);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 3);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 1);

            seq_heap_decref(obj);
        }
    }

    #[test]
    #[cfg(not(feature = "nanbox"))]
    fn test_stack_with_heap_objects() {
        let mut stack = TaggedStack::new(16);

        stack.push_int(42);

        let float_obj = seq_alloc_float(2.5);
        stack.push(StackValue {
            slot0: tag_heap_ptr(float_obj),
            slot1: 0,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        });

        stack.push_int(100);

        assert_eq!(stack.depth(), 3);

        assert_eq!(stack.pop_int(), 100);

        let val = stack.pop();
        assert!(is_tagged_heap(val.slot0));
        unsafe {
            assert_eq!(seq_get_float(untag_heap_ptr(val.slot0)), 2.5);
        }

        assert_eq!(stack.pop_int(), 42);

        unsafe {
            seq_free_heap_object(float_obj);
        }
    }

    #[test]
    #[cfg(not(feature = "nanbox"))]
    fn test_stack_value_size() {
        assert_eq!(std::mem::size_of::<StackValue>(), 40);
        assert_eq!(STACK_VALUE_SIZE, 40);
    }

    #[test]
    #[cfg(feature = "nanbox")]
    fn test_stack_value_size_nanbox() {
        assert_eq!(std::mem::size_of::<StackValue>(), 8);
        assert_eq!(STACK_VALUE_SIZE, 8);
    }
}