1use std::alloc::{Layout, alloc, dealloc, realloc};
48
/// A single 40-byte stack cell, laid out as five raw 64-bit slots.
///
/// `repr(C)` fixes the field order and offsets so generated/FFI code can
/// address the slots directly. Slot meaning is convention-based: the
/// `Debug` impl below treats `slot0` as a discriminant (0 = Int,
/// 2 = Bool) with the payload in `slot1`, and tests also store a tagged
/// heap pointer in `slot0` (see `tag_heap_ptr`).
#[repr(C)]
#[derive(Clone, Copy, Default)]
pub struct StackValue {
    // Discriminant (0 = Int, 2 = Bool) or a tagged heap pointer.
    pub slot0: u64,
    // Primary payload (e.g. the i64 value for Int, reinterpreted as u64).
    pub slot1: u64,
    // Extra payload; meaning depends on the discriminant.
    pub slot2: u64,
    // Extra payload; meaning depends on the discriminant.
    pub slot3: u64,
    // Extra payload; meaning depends on the discriminant.
    pub slot4: u64,
}
67
68impl std::fmt::Debug for StackValue {
69 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
70 if self.slot0 == 0 {
72 write!(f, "Int({})", self.slot1 as i64)
73 } else if self.slot0 == 2 {
74 write!(f, "Bool({})", self.slot1 != 0)
76 } else {
77 write!(
78 f,
79 "StackValue {{ slot0: 0x{:x}, slot1: 0x{:x}, slot2: 0x{:x}, slot3: 0x{:x}, slot4: 0x{:x} }}",
80 self.slot0, self.slot1, self.slot2, self.slot3, self.slot4
81 )
82 }
83 }
84}
85
/// Byte size of one `StackValue` cell (the stride between stack slots).
pub const STACK_VALUE_SIZE: usize = std::mem::size_of::<StackValue>();

/// Compile-time layout guard: five u64 slots must pack to exactly 40 bytes.
const _: () = assert!(STACK_VALUE_SIZE == 40, "StackValue must be 40 bytes");
91
/// A machine word that is either an immediate 63-bit integer or a heap
/// pointer, distinguished by the low bit.
pub type TaggedValue = u64;

/// Mask selecting the tag bit.
pub const TAG_MASK: u64 = 1;

/// Low bit set: the word holds an immediate integer.
pub const TAG_INT: u64 = 1;

/// Low bit clear: the word holds an (8-byte-aligned) heap pointer.
pub const TAG_HEAP: u64 = 0;

/// True when `val` encodes an immediate integer.
#[inline(always)]
pub const fn is_tagged_int(val: TaggedValue) -> bool {
    val & TAG_MASK == TAG_INT
}

/// True when `val` encodes a heap pointer.
#[inline(always)]
pub const fn is_tagged_heap(val: TaggedValue) -> bool {
    val & TAG_MASK == TAG_HEAP
}

/// Encodes `val` as an immediate: shift the payload up one bit and set
/// the tag bit. Only 63 bits of payload survive the shift.
#[inline(always)]
pub const fn tag_int(val: i64) -> TaggedValue {
    ((val as u64) << 1) | TAG_INT
}

/// Decodes an immediate: an arithmetic right shift drops the tag bit and
/// restores the sign.
#[inline(always)]
pub const fn untag_int(val: TaggedValue) -> i64 {
    (val as i64) >> 1
}
134
135#[inline(always)]
139pub fn tag_heap_ptr(ptr: *mut HeapObject) -> TaggedValue {
140 debug_assert!(
141 (ptr as usize) & 0x7 == 0,
142 "HeapObject pointer must be 8-byte aligned"
143 );
144 ptr as TaggedValue
145}
146
147#[inline(always)]
151pub fn untag_heap_ptr(val: TaggedValue) -> *mut HeapObject {
152 val as *mut HeapObject
153}
154
/// Discriminant stored in `HeapObject::tag`, identifying the concrete
/// object layout behind a heap pointer.
///
/// Stored as a raw `u8` in the header for FFI; `seq_free_heap_object`
/// switches on it to pick a deallocation layout.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HeapTag {
    Float = 1,
    Bool = 2,
    String = 3,
    Variant = 4,
    Map = 5,
    Quotation = 6,
    Closure = 7,
}
167
/// Common header at the start of every heap-allocated object.
///
/// `repr(C)` pins the field order so FFI code can read the tag and
/// refcount at fixed offsets. The four fields pack into one 8-byte word.
#[repr(C)]
pub struct HeapObject {
    // A `HeapTag` value, stored raw for FFI.
    pub tag: u8,
    // Bit flags; see the `heap_flags` module.
    pub flags: u8,
    // Unused padding; the allocators below always write 0.
    pub reserved: u16,
    // Reference count; mutated atomically by seq_heap_incref/decref.
    pub refcount: u32,
}
183
/// Flag bits for `HeapObject::flags`.
pub mod heap_flags {
    /// Object lives in static memory: `seq_heap_incref`/`seq_heap_decref`
    /// skip refcounting for it and it is never freed.
    pub const IS_STATIC: u8 = 0x01;
}
189
190impl HeapObject {
191 #[inline(always)]
193 pub fn is_static(&self) -> bool {
194 self.flags & heap_flags::IS_STATIC != 0
195 }
196}
197
/// Heap-boxed f64 (`HeapTag::Float`).
#[repr(C)]
pub struct FloatObject {
    pub header: HeapObject,
    pub value: f64,
}

/// Heap-boxed bool (`HeapTag::Bool`).
#[repr(C)]
pub struct BoolObject {
    pub header: HeapObject,
    pub value: bool,
}

/// A quotation (`HeapTag::Quotation`) holding two code addresses stored
/// as `usize`.
/// NOTE(review): the wrapper/impl distinction and calling convention are
/// defined by the code generator, not visible in this file — confirm there.
#[repr(C)]
pub struct QuotationObject {
    pub header: HeapObject,
    pub wrapper: usize,
    pub impl_ptr: usize,
}
221
222pub const DEFAULT_STACK_CAPACITY: usize = 4096;
224
/// A growable operand stack of `StackValue` cells backed by a raw
/// allocation.
///
/// `repr(C)` so FFI/generated code can read the fields at fixed offsets
/// (the `seq_stack_*` entry points below expose them).
#[repr(C)]
pub struct TaggedStack {
    // Start of the allocation; holds `capacity` cells.
    pub base: *mut StackValue,
    // Number of live values; the next push writes at `base + sp`.
    pub sp: usize,
    // Allocated cell count.
    pub capacity: usize,
}
240
241impl TaggedStack {
242 pub fn new(capacity: usize) -> Self {
244 let layout = Layout::array::<StackValue>(capacity).expect("stack layout overflow");
245 let base = unsafe { alloc(layout) as *mut StackValue };
246 if base.is_null() {
247 panic!("Failed to allocate tagged stack");
248 }
249
250 TaggedStack {
251 base,
252 sp: 0,
253 capacity,
254 }
255 }
256
257 pub fn with_default_capacity() -> Self {
259 Self::new(DEFAULT_STACK_CAPACITY)
260 }
261
262 #[inline(always)]
264 pub fn depth(&self) -> usize {
265 self.sp
266 }
267
268 #[inline(always)]
270 pub fn is_empty(&self) -> bool {
271 self.sp == 0
272 }
273
274 #[inline(always)]
276 pub fn has_capacity(&self, n: usize) -> bool {
277 self.sp + n <= self.capacity
278 }
279
280 pub fn grow(&mut self, min_capacity: usize) {
284 let new_capacity = (self.capacity * 2).max(min_capacity);
285 let old_layout = Layout::array::<StackValue>(self.capacity).expect("old layout overflow");
286 let new_layout = Layout::array::<StackValue>(new_capacity).expect("new layout overflow");
287
288 let new_base = unsafe {
289 realloc(self.base as *mut u8, old_layout, new_layout.size()) as *mut StackValue
290 };
291
292 if new_base.is_null() {
293 panic!(
294 "Failed to grow tagged stack from {} to {}",
295 self.capacity, new_capacity
296 );
297 }
298
299 self.base = new_base;
300 self.capacity = new_capacity;
301 }
302
303 #[inline]
307 pub fn push(&mut self, val: StackValue) {
308 if self.sp >= self.capacity {
309 self.grow(self.capacity + 1);
310 }
311 unsafe {
312 *self.base.add(self.sp) = val;
313 }
314 self.sp += 1;
315 }
316
317 #[inline]
321 pub fn pop(&mut self) -> StackValue {
322 assert!(self.sp > 0, "pop: stack is empty");
323 self.sp -= 1;
324 unsafe { *self.base.add(self.sp) }
325 }
326
327 #[inline]
331 pub fn peek(&self) -> StackValue {
332 assert!(self.sp > 0, "peek: stack is empty");
333 unsafe { *self.base.add(self.sp - 1) }
334 }
335
336 #[inline(always)]
341 pub fn sp_ptr(&self) -> *mut StackValue {
342 unsafe { self.base.add(self.sp) }
343 }
344
345 #[inline]
348 pub fn push_int(&mut self, val: i64) {
349 self.push(StackValue {
350 slot0: 0, slot1: val as u64,
352 slot2: 0,
353 slot3: 0,
354 slot4: 0,
355 });
356 }
357
358 #[inline]
362 pub fn pop_int(&mut self) -> i64 {
363 let val = self.pop();
364 assert!(
365 val.slot0 == 0,
366 "pop_int: expected Int (discriminant 0), got discriminant {}",
367 val.slot0
368 );
369 val.slot1 as i64
370 }
371
372 pub fn clone_stack(&self) -> Self {
377 use crate::stack::clone_stack_value;
378
379 let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
381 let new_base = unsafe { alloc(layout) as *mut StackValue };
382 if new_base.is_null() {
383 panic!("Failed to allocate cloned stack");
384 }
385
386 for i in 0..self.sp {
388 unsafe {
389 let sv = &*self.base.add(i);
390 let cloned = clone_stack_value(sv);
391 *new_base.add(i) = cloned;
392 }
393 }
394
395 TaggedStack {
396 base: new_base,
397 sp: self.sp,
398 capacity: self.capacity,
399 }
400 }
401}
402
impl Drop for TaggedStack {
    /// Releases every live value, then frees the backing buffer.
    fn drop(&mut self) {
        use crate::stack::drop_stack_value;

        // Release each live value first. `drop_stack_value` is defined in
        // crate::stack (not visible here); presumably it decrefs any heap
        // payloads — confirm against that module.
        for i in 0..self.sp {
            unsafe {
                let sv = *self.base.add(i);
                drop_stack_value(sv);
            }
        }

        // Free the buffer with the same layout it was allocated with.
        if !self.base.is_null() {
            let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
            unsafe {
                dealloc(self.base as *mut u8, layout);
            }
        }
    }
}
424
425#[unsafe(no_mangle)]
434pub extern "C" fn seq_stack_new(capacity: usize) -> *mut TaggedStack {
435 let stack = Box::new(TaggedStack::new(capacity));
436 Box::into_raw(stack)
437}
438
439#[unsafe(no_mangle)]
441pub extern "C" fn seq_stack_new_default() -> *mut TaggedStack {
442 let stack = Box::new(TaggedStack::with_default_capacity());
443 Box::into_raw(stack)
444}
445
446#[unsafe(no_mangle)]
451pub unsafe extern "C" fn seq_stack_free(stack: *mut TaggedStack) {
452 if !stack.is_null() {
453 unsafe {
454 drop(Box::from_raw(stack));
455 }
456 }
457}
458
459#[unsafe(no_mangle)]
464pub unsafe extern "C" fn seq_stack_grow(stack: *mut TaggedStack, min_capacity: usize) {
465 assert!(!stack.is_null(), "seq_stack_grow: null stack");
466 unsafe {
467 (*stack).grow(min_capacity);
468 }
469}
470
471#[unsafe(no_mangle)]
479pub unsafe extern "C" fn seq_stack_base(stack: *mut TaggedStack) -> *mut StackValue {
480 assert!(!stack.is_null(), "seq_stack_base: null stack");
481 unsafe { (*stack).base }
482}
483
484#[unsafe(no_mangle)]
489pub unsafe extern "C" fn seq_stack_sp(stack: *mut TaggedStack) -> usize {
490 assert!(!stack.is_null(), "seq_stack_sp: null stack");
491 unsafe { (*stack).sp }
492}
493
494#[unsafe(no_mangle)]
499pub unsafe extern "C" fn seq_stack_set_sp(stack: *mut TaggedStack, new_sp: usize) {
500 assert!(!stack.is_null(), "seq_stack_set_sp: null stack");
501 unsafe {
502 assert!(
503 new_sp <= (*stack).capacity,
504 "seq_stack_set_sp: sp exceeds capacity"
505 );
506 (*stack).sp = new_sp;
507 }
508}
509
510#[unsafe(no_mangle)]
515pub unsafe extern "C" fn seq_stack_capacity(stack: *mut TaggedStack) -> usize {
516 assert!(!stack.is_null(), "seq_stack_capacity: null stack");
517 unsafe { (*stack).capacity }
518}
519
520#[unsafe(no_mangle)]
525pub unsafe extern "C" fn seq_stack_clone(stack: *mut TaggedStack) -> *mut TaggedStack {
526 assert!(!stack.is_null(), "seq_stack_clone: null stack");
527 let cloned = unsafe { (*stack).clone_stack() };
528 Box::into_raw(Box::new(cloned))
529}
530
531#[unsafe(no_mangle)]
537pub extern "C" fn seq_alloc_float(value: f64) -> *mut HeapObject {
538 let layout = Layout::new::<FloatObject>();
539 let ptr = unsafe { alloc(layout) as *mut FloatObject };
540 if ptr.is_null() {
541 panic!("Failed to allocate FloatObject");
542 }
543
544 unsafe {
545 (*ptr).header = HeapObject {
546 tag: HeapTag::Float as u8,
547 flags: 0,
548 reserved: 0,
549 refcount: 1,
550 };
551 (*ptr).value = value;
552 }
553
554 ptr as *mut HeapObject
555}
556
557#[unsafe(no_mangle)]
559pub extern "C" fn seq_alloc_bool(value: bool) -> *mut HeapObject {
560 let layout = Layout::new::<BoolObject>();
561 let ptr = unsafe { alloc(layout) as *mut BoolObject };
562 if ptr.is_null() {
563 panic!("Failed to allocate BoolObject");
564 }
565
566 unsafe {
567 (*ptr).header = HeapObject {
568 tag: HeapTag::Bool as u8,
569 flags: 0,
570 reserved: 0,
571 refcount: 1,
572 };
573 (*ptr).value = value;
574 }
575
576 ptr as *mut HeapObject
577}
578
579#[unsafe(no_mangle)]
581pub extern "C" fn seq_alloc_quotation(wrapper: usize, impl_ptr: usize) -> *mut HeapObject {
582 let layout = Layout::new::<QuotationObject>();
583 let ptr = unsafe { alloc(layout) as *mut QuotationObject };
584 if ptr.is_null() {
585 panic!("Failed to allocate QuotationObject");
586 }
587
588 unsafe {
589 (*ptr).header = HeapObject {
590 tag: HeapTag::Quotation as u8,
591 flags: 0,
592 reserved: 0,
593 refcount: 1,
594 };
595 (*ptr).wrapper = wrapper;
596 (*ptr).impl_ptr = impl_ptr;
597 }
598
599 ptr as *mut HeapObject
600}
601
/// Frees a heap object, choosing the deallocation layout from its tag.
/// Null is accepted and ignored.
///
/// # Safety
/// `obj` must be null or a live pointer returned by a `seq_alloc_*`
/// function; it must not be used after this call.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_free_heap_object(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }

    unsafe {
        let tag = (*obj).tag;
        match tag {
            t if t == HeapTag::Float as u8 => {
                let layout = Layout::new::<FloatObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Bool as u8 => {
                let layout = Layout::new::<BoolObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Quotation as u8 => {
                let layout = Layout::new::<QuotationObject>();
                dealloc(obj as *mut u8, layout);
            }
            _ => {
                // NOTE(review): this fallback deallocates with the bare
                // header layout. `dealloc` with a layout that differs from
                // the allocation's is undefined behavior, so this is only
                // sound if String/Variant/Map/Closure objects are never
                // freed through this path — confirm against their
                // allocators/free routines.
                let layout = Layout::new::<HeapObject>();
                dealloc(obj as *mut u8, layout);
            }
        }
    }
}
636
637#[unsafe(no_mangle)]
642pub unsafe extern "C" fn seq_heap_incref(obj: *mut HeapObject) {
643 if obj.is_null() {
644 return;
645 }
646 unsafe {
647 if !(*obj).is_static() {
648 let rc = &(*obj).refcount as *const u32 as *mut u32;
649 std::sync::atomic::AtomicU32::from_ptr(rc)
650 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
651 }
652 }
653}
654
655#[unsafe(no_mangle)]
660pub unsafe extern "C" fn seq_heap_decref(obj: *mut HeapObject) {
661 if obj.is_null() {
662 return;
663 }
664 unsafe {
665 if !(*obj).is_static() {
666 let rc = &(*obj).refcount as *const u32 as *mut u32;
667 let old = std::sync::atomic::AtomicU32::from_ptr(rc)
668 .fetch_sub(1, std::sync::atomic::Ordering::AcqRel);
669 if old == 1 {
670 seq_free_heap_object(obj);
671 }
672 }
673 }
674}
675
676#[unsafe(no_mangle)]
681pub unsafe extern "C" fn seq_get_float(obj: *mut HeapObject) -> f64 {
682 assert!(!obj.is_null(), "seq_get_float: null object");
683 unsafe {
684 assert!(
685 (*obj).tag == HeapTag::Float as u8,
686 "seq_get_float: not a float"
687 );
688 (*(obj as *mut FloatObject)).value
689 }
690}
691
692#[unsafe(no_mangle)]
697pub unsafe extern "C" fn seq_get_bool(obj: *mut HeapObject) -> bool {
698 assert!(!obj.is_null(), "seq_get_bool: null object");
699 unsafe {
700 assert!(
701 (*obj).tag == HeapTag::Bool as u8,
702 "seq_get_bool: not a bool"
703 );
704 (*(obj as *mut BoolObject)).value
705 }
706}
707
#[cfg(test)]
mod tests {
    use super::*;

    /// tag/untag must round-trip every representable 63-bit integer.
    #[test]
    fn test_tag_untag_int() {
        for v in [0i64, 1, 42, 1000000, -1, -42, -1000000] {
            assert_eq!(untag_int(tag_int(v)), v);
        }

        // Extremes of the 63-bit payload range.
        let max_63bit = (1i64 << 62) - 1;
        let min_63bit = -(1i64 << 62);
        assert_eq!(untag_int(tag_int(max_63bit)), max_63bit);
        assert_eq!(untag_int(tag_int(min_63bit)), min_63bit);
    }

    /// The low bit distinguishes immediates from heap pointers.
    #[test]
    fn test_is_tagged() {
        for v in [0i64, 42, -1] {
            assert!(is_tagged_int(tag_int(v)));
        }

        // Any 8-byte-aligned address has a clear low bit.
        let fake_ptr = 0x1000u64;
        assert!(is_tagged_heap(fake_ptr));
        assert!(!is_tagged_int(fake_ptr));
    }

    /// Spot-check concrete encodings.
    #[test]
    fn test_tagged_int_examples() {
        // 42 -> (42 << 1) | 1 = 85 = 0x55
        assert_eq!(tag_int(42), 85);
        assert_eq!(tag_int(42), 0x55);

        // -1 -> all ones: the shift keeps the sign bits, the tag fills bit 0.
        assert_eq!(tag_int(-1), 0xFFFFFFFFFFFFFFFFu64);
    }

    /// push/pop obey LIFO order and depth tracks them.
    #[test]
    fn test_stack_basic_operations() {
        let mut stack = TaggedStack::new(16);

        assert!(stack.is_empty());
        assert_eq!(stack.depth(), 0);

        for v in [10, 20, 30] {
            stack.push_int(v);
        }

        assert!(!stack.is_empty());
        assert_eq!(stack.depth(), 3);

        for expected in [30, 20, 10] {
            assert_eq!(stack.pop_int(), expected);
        }

        assert!(stack.is_empty());
    }

    /// peek reads the top value without consuming it.
    #[test]
    fn test_stack_peek() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(42);

        let top = stack.peek();
        assert_eq!(top.slot0, 0);
        assert_eq!(top.slot1 as i64, 42);
        assert_eq!(stack.depth(), 1);

        assert_eq!(stack.pop_int(), 42);
        assert!(stack.is_empty());
    }

    /// Pushing past the initial capacity must transparently reallocate.
    #[test]
    fn test_stack_grow() {
        let mut stack = TaggedStack::new(4);

        for i in 0..100 {
            stack.push_int(i);
        }

        assert_eq!(stack.depth(), 100);
        assert!(stack.capacity >= 100);

        for i in (0..100).rev() {
            assert_eq!(stack.pop_int(), i);
        }
    }

    /// A clone pops independently of (and identically to) the original.
    #[test]
    fn test_stack_clone() {
        let mut stack = TaggedStack::new(16);
        for v in [1, 2, 3] {
            stack.push_int(v);
        }

        let mut cloned = stack.clone_stack();

        for expected in [3, 2, 1] {
            assert_eq!(cloned.pop_int(), expected);
        }
        for expected in [3, 2, 1] {
            assert_eq!(stack.pop_int(), expected);
        }
    }

    /// The C entry points agree with the Rust accessors.
    #[test]
    fn test_ffi_stack_new_free() {
        let raw = seq_stack_new(64);
        assert!(!raw.is_null());

        unsafe {
            assert_eq!(seq_stack_capacity(raw), 64);
            assert_eq!(seq_stack_sp(raw), 0);

            seq_stack_free(raw);
        }
    }

    /// Float objects carry their tag, refcount, payload, and alignment.
    #[test]
    fn test_float_object() {
        let obj = seq_alloc_float(2.5);
        assert!(!obj.is_null());

        unsafe {
            assert_eq!((*obj).tag, HeapTag::Float as u8);
            assert_eq!((*obj).refcount, 1);
            assert_eq!(seq_get_float(obj), 2.5);

            // tag_heap_ptr depends on 8-byte alignment.
            assert!((obj as usize) & 0x7 == 0);

            seq_free_heap_object(obj);
        }
    }

    /// Bool objects round-trip both values.
    #[test]
    fn test_bool_object() {
        let obj_true = seq_alloc_bool(true);
        let obj_false = seq_alloc_bool(false);

        unsafe {
            assert!(seq_get_bool(obj_true));
            assert!(!seq_get_bool(obj_false));

            seq_free_heap_object(obj_true);
            seq_free_heap_object(obj_false);
        }
    }

    /// incref/decref adjust the count; the final decref frees the object.
    #[test]
    fn test_refcount() {
        let obj = seq_alloc_float(1.0);

        unsafe {
            assert_eq!((*obj).refcount, 1);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 3);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 1);

            // This decref reaches zero and frees obj — no use after this.
            seq_heap_decref(obj);
        }
    }

    /// Ints and tagged heap pointers can be mixed on the same stack.
    #[test]
    fn test_stack_with_heap_objects() {
        let mut stack = TaggedStack::new(16);

        stack.push_int(42);

        let float_obj = seq_alloc_float(2.5);
        stack.push(StackValue {
            slot0: tag_heap_ptr(float_obj),
            slot1: 0,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        });

        stack.push_int(100);

        assert_eq!(stack.depth(), 3);

        assert_eq!(stack.pop_int(), 100);

        let heap_val = stack.pop();
        assert!(is_tagged_heap(heap_val.slot0));
        unsafe {
            assert_eq!(seq_get_float(untag_heap_ptr(heap_val.slot0)), 2.5);
        }

        assert_eq!(stack.pop_int(), 42);

        unsafe {
            seq_free_heap_object(float_obj);
        }
    }

    /// The 40-byte layout is load-bearing for generated code.
    #[test]
    fn test_stack_value_size() {
        assert_eq!(std::mem::size_of::<StackValue>(), 40);
        assert_eq!(STACK_VALUE_SIZE, 40);
    }
}
951}