1use std::alloc::{Layout, alloc, dealloc, realloc};
48
/// A single stack slot: five raw 64-bit words.
///
/// `slot0` carries a discriminant / tag word (0 = Int, 2 = Bool per the
/// `Debug` impl and `push_int`/`pop_int`); `slot1` carries the primary
/// payload. The remaining slots are extra payload space for larger values.
/// `#[repr(C)]` keeps the layout fixed for FFI/JIT consumers.
#[repr(C)]
#[derive(Clone, Copy, Default)]
pub struct StackValue {
    pub slot0: u64,
    pub slot1: u64,
    pub slot2: u64,
    pub slot3: u64,
    pub slot4: u64,
}

impl std::fmt::Debug for StackValue {
    /// Pretty-prints the two recognized discriminants and falls back to a
    /// raw hex dump of all five slots for anything else.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.slot0 {
            // Discriminant 0: integer, payload in slot1.
            0 => write!(f, "Int({})", self.slot1 as i64),
            // Discriminant 2: boolean, nonzero slot1 means true.
            2 => write!(f, "Bool({})", self.slot1 != 0),
            _ => write!(
                f,
                "StackValue {{ slot0: 0x{:x}, slot1: 0x{:x}, slot2: 0x{:x}, slot3: 0x{:x}, slot4: 0x{:x} }}",
                self.slot0, self.slot1, self.slot2, self.slot3, self.slot4
            ),
        }
    }
}
89
/// Size in bytes of one stack slot (five u64 words = 40 bytes).
pub const STACK_VALUE_SIZE: usize = std::mem::size_of::<StackValue>();

// Compile-time layout guard: fails the build if StackValue's size drifts.
// Presumably external (FFI/JIT) code hard-codes the 40-byte stride — verify.
const _: () = assert!(STACK_VALUE_SIZE == 40, "StackValue must be 40 bytes");
95
/// A word that is either a small integer or a heap pointer, distinguished
/// by the low bit: 1 = inline int (value in the upper 63 bits), 0 = pointer
/// (valid because heap objects are at least 2-byte aligned).
pub type TaggedValue = u64;

/// Mask selecting the tag bit.
pub const TAG_MASK: u64 = 1;

/// Low-bit value marking an inline integer.
pub const TAG_INT: u64 = 1;

/// Low-bit value marking a heap pointer.
pub const TAG_HEAP: u64 = 0;

/// True if `val` encodes an inline integer (low bit set).
#[inline(always)]
pub const fn is_tagged_int(val: TaggedValue) -> bool {
    matches!(val & TAG_MASK, TAG_INT)
}

/// True if `val` encodes a heap pointer (low bit clear).
#[inline(always)]
pub const fn is_tagged_heap(val: TaggedValue) -> bool {
    let tag_bit = val & TAG_MASK;
    tag_bit == TAG_HEAP
}

/// Encodes `val` as a tagged integer: shift left one bit, set the tag.
/// The top bit of `val` is discarded, so only 63-bit integers round-trip.
#[inline(always)]
pub const fn tag_int(val: i64) -> TaggedValue {
    ((val as u64) << 1) | TAG_INT
}

/// Decodes a tagged integer; the arithmetic right shift restores the sign.
#[inline(always)]
pub const fn untag_int(val: TaggedValue) -> i64 {
    let signed = val as i64;
    signed >> 1
}
138
139#[inline(always)]
143pub fn tag_heap_ptr(ptr: *mut HeapObject) -> TaggedValue {
144 debug_assert!(
145 (ptr as usize) & 0x7 == 0,
146 "HeapObject pointer must be 8-byte aligned"
147 );
148 ptr as TaggedValue
149}
150
151#[inline(always)]
155pub fn untag_heap_ptr(val: TaggedValue) -> *mut HeapObject {
156 val as *mut HeapObject
157}
158
/// Discriminant stored in `HeapObject::tag` identifying the payload type.
///
/// `repr(u8)` pins each variant to the listed byte value so FFI code can
/// compare tags directly. Value 0 is unused — presumably reserved; TODO
/// confirm (inline ints use low-bit tagging, not a heap tag).
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HeapTag {
    Float = 1,
    Bool = 2,
    String = 3,
    Variant = 4,
    Map = 5,
    Quotation = 6,
    Closure = 7,
}
171
/// Common 8-byte header shared by every heap-allocated object.
///
/// `#[repr(C)]` fixes the field order so typed payload structs
/// (e.g. `FloatObject`) can embed it as their first field and be
/// safely viewed through a `*mut HeapObject`.
#[repr(C)]
pub struct HeapObject {
    /// One of the `HeapTag` discriminant values.
    pub tag: u8,
    /// Bit flags; see `heap_flags`.
    pub flags: u8,
    /// Padding / reserved for future use.
    pub reserved: u16,
    /// Reference count; objects start at 1.
    pub refcount: u32,
}

/// Bit values for `HeapObject::flags`.
pub mod heap_flags {
    /// Set on objects in static memory: they are never refcounted or freed.
    pub const IS_STATIC: u8 = 0x01;
}

impl HeapObject {
    /// True when the `IS_STATIC` flag bit is set.
    #[inline(always)]
    pub fn is_static(&self) -> bool {
        (self.flags & heap_flags::IS_STATIC) == heap_flags::IS_STATIC
    }
}
201
/// Heap payload for `HeapTag::Float`: header followed by the f64 value.
#[repr(C)]
pub struct FloatObject {
    pub header: HeapObject,
    pub value: f64,
}

/// Heap payload for `HeapTag::Bool`.
#[repr(C)]
pub struct BoolObject {
    pub header: HeapObject,
    pub value: bool,
}

/// Heap payload for `HeapTag::Quotation`.
#[repr(C)]
pub struct QuotationObject {
    pub header: HeapObject,
    // Stored as raw usizes — presumably code addresses (wrapper entry point
    // and underlying implementation); TODO confirm against the caller/JIT.
    pub wrapper: usize,
    pub impl_ptr: usize,
}
225
/// Default slot count for `TaggedStack::with_default_capacity`
/// (4096 slots x 40 bytes = 160 KiB).
pub const DEFAULT_STACK_CAPACITY: usize = 4096;
228
/// Growable value stack backed by a raw heap allocation.
///
/// `#[repr(C)]` fixes the field offsets — presumably so FFI/JIT code can
/// read `base`/`sp`/`capacity` directly (see `seq_stack_base` etc.).
#[repr(C)]
pub struct TaggedStack {
    /// Start of the allocated slot array.
    pub base: *mut StackValue,
    /// Number of live values; also the index of the next free slot.
    pub sp: usize,
    /// Total number of allocated slots.
    pub capacity: usize,
}
244
245impl TaggedStack {
246 pub fn new(capacity: usize) -> Self {
248 let layout = Layout::array::<StackValue>(capacity).expect("stack layout overflow");
249 let base = unsafe { alloc(layout) as *mut StackValue };
250 if base.is_null() {
251 panic!("Failed to allocate tagged stack");
252 }
253
254 TaggedStack {
255 base,
256 sp: 0,
257 capacity,
258 }
259 }
260
261 pub fn with_default_capacity() -> Self {
263 Self::new(DEFAULT_STACK_CAPACITY)
264 }
265
266 #[inline(always)]
268 pub fn depth(&self) -> usize {
269 self.sp
270 }
271
272 #[inline(always)]
274 pub fn is_empty(&self) -> bool {
275 self.sp == 0
276 }
277
278 #[inline(always)]
280 pub fn has_capacity(&self, n: usize) -> bool {
281 self.sp + n <= self.capacity
282 }
283
284 pub fn grow(&mut self, min_capacity: usize) {
288 let new_capacity = (self.capacity * 2).max(min_capacity);
289 let old_layout = Layout::array::<StackValue>(self.capacity).expect("old layout overflow");
290 let new_layout = Layout::array::<StackValue>(new_capacity).expect("new layout overflow");
291
292 let new_base = unsafe {
293 realloc(self.base as *mut u8, old_layout, new_layout.size()) as *mut StackValue
294 };
295
296 if new_base.is_null() {
297 panic!(
298 "Failed to grow tagged stack from {} to {}",
299 self.capacity, new_capacity
300 );
301 }
302
303 self.base = new_base;
304 self.capacity = new_capacity;
305 }
306
307 #[inline]
311 pub fn push(&mut self, val: StackValue) {
312 if self.sp >= self.capacity {
313 self.grow(self.capacity + 1);
314 }
315 unsafe {
316 *self.base.add(self.sp) = val;
317 }
318 self.sp += 1;
319 }
320
321 #[inline]
325 pub fn pop(&mut self) -> StackValue {
326 assert!(self.sp > 0, "pop: stack is empty");
327 self.sp -= 1;
328 unsafe { *self.base.add(self.sp) }
329 }
330
331 #[inline]
335 pub fn peek(&self) -> StackValue {
336 assert!(self.sp > 0, "peek: stack is empty");
337 unsafe { *self.base.add(self.sp - 1) }
338 }
339
340 #[inline(always)]
345 pub fn sp_ptr(&self) -> *mut StackValue {
346 unsafe { self.base.add(self.sp) }
347 }
348
349 #[inline]
352 pub fn push_int(&mut self, val: i64) {
353 self.push(StackValue {
354 slot0: 0, slot1: val as u64,
356 slot2: 0,
357 slot3: 0,
358 slot4: 0,
359 });
360 }
361
362 #[inline]
366 pub fn pop_int(&mut self) -> i64 {
367 let val = self.pop();
368 assert!(
369 val.slot0 == 0,
370 "pop_int: expected Int (discriminant 0), got discriminant {}",
371 val.slot0
372 );
373 val.slot1 as i64
374 }
375
376 pub fn clone_stack(&self) -> Self {
381 use crate::stack::clone_stack_value;
382
383 let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
385 let new_base = unsafe { alloc(layout) as *mut StackValue };
386 if new_base.is_null() {
387 panic!("Failed to allocate cloned stack");
388 }
389
390 for i in 0..self.sp {
392 unsafe {
393 let sv = &*self.base.add(i);
394 let cloned = clone_stack_value(sv);
395 *new_base.add(i) = cloned;
396 }
397 }
398
399 TaggedStack {
400 base: new_base,
401 sp: self.sp,
402 capacity: self.capacity,
403 }
404 }
405}
406
impl Drop for TaggedStack {
    /// Releases every live value, then frees the backing array.
    ///
    /// Order matters: `drop_stack_value` must run on each slot (so heap
    /// references it owns are released) before the slots' memory is freed.
    fn drop(&mut self) {
        use crate::stack::drop_stack_value;

        for i in 0..self.sp {
            // SAFETY (review): slots 0..sp were initialized by push.
            unsafe {
                let sv = *self.base.add(i);
                drop_stack_value(sv);
            }
        }

        // `base` is non-null for any stack built by `new` (it panics on
        // allocation failure); the check is defensive.
        if !self.base.is_null() {
            let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
            unsafe {
                dealloc(self.base as *mut u8, layout);
            }
        }
    }
}
428
429#[unsafe(no_mangle)]
438pub extern "C" fn seq_stack_new(capacity: usize) -> *mut TaggedStack {
439 let stack = Box::new(TaggedStack::new(capacity));
440 Box::into_raw(stack)
441}
442
443#[unsafe(no_mangle)]
445pub extern "C" fn seq_stack_new_default() -> *mut TaggedStack {
446 let stack = Box::new(TaggedStack::with_default_capacity());
447 Box::into_raw(stack)
448}
449
450#[unsafe(no_mangle)]
455pub unsafe extern "C" fn seq_stack_free(stack: *mut TaggedStack) {
456 if !stack.is_null() {
457 unsafe {
458 drop(Box::from_raw(stack));
459 }
460 }
461}
462
463#[unsafe(no_mangle)]
468pub unsafe extern "C" fn seq_stack_grow(stack: *mut TaggedStack, min_capacity: usize) {
469 assert!(!stack.is_null(), "seq_stack_grow: null stack");
470 unsafe {
471 (*stack).grow(min_capacity);
472 }
473}
474
475#[unsafe(no_mangle)]
483pub unsafe extern "C" fn seq_stack_base(stack: *mut TaggedStack) -> *mut StackValue {
484 assert!(!stack.is_null(), "seq_stack_base: null stack");
485 unsafe { (*stack).base }
486}
487
488#[unsafe(no_mangle)]
493pub unsafe extern "C" fn seq_stack_sp(stack: *mut TaggedStack) -> usize {
494 assert!(!stack.is_null(), "seq_stack_sp: null stack");
495 unsafe { (*stack).sp }
496}
497
/// FFI: overwrites the stack pointer directly.
///
/// NOTE(review): lowering `sp` discards values without running
/// `drop_stack_value` on them (heap references leak); raising it exposes
/// slots this code never initialized — the caller must have written them
/// first. Both are presumably intentional for JIT-managed frames; confirm.
///
/// # Safety
/// `stack` must be a valid, live pointer; slots below `new_sp` must be
/// initialized.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_set_sp(stack: *mut TaggedStack, new_sp: usize) {
    assert!(!stack.is_null(), "seq_stack_set_sp: null stack");
    unsafe {
        assert!(
            new_sp <= (*stack).capacity,
            "seq_stack_set_sp: sp exceeds capacity"
        );
        (*stack).sp = new_sp;
    }
}
513
514#[unsafe(no_mangle)]
519pub unsafe extern "C" fn seq_stack_capacity(stack: *mut TaggedStack) -> usize {
520 assert!(!stack.is_null(), "seq_stack_capacity: null stack");
521 unsafe { (*stack).capacity }
522}
523
524#[unsafe(no_mangle)]
529pub unsafe extern "C" fn seq_stack_clone(stack: *mut TaggedStack) -> *mut TaggedStack {
530 assert!(!stack.is_null(), "seq_stack_clone: null stack");
531 let cloned = unsafe { (*stack).clone_stack() };
532 Box::into_raw(Box::new(cloned))
533}
534
535#[unsafe(no_mangle)]
541pub extern "C" fn seq_alloc_float(value: f64) -> *mut HeapObject {
542 let layout = Layout::new::<FloatObject>();
543 let ptr = unsafe { alloc(layout) as *mut FloatObject };
544 if ptr.is_null() {
545 panic!("Failed to allocate FloatObject");
546 }
547
548 unsafe {
549 (*ptr).header = HeapObject {
550 tag: HeapTag::Float as u8,
551 flags: 0,
552 reserved: 0,
553 refcount: 1,
554 };
555 (*ptr).value = value;
556 }
557
558 ptr as *mut HeapObject
559}
560
561#[unsafe(no_mangle)]
563pub extern "C" fn seq_alloc_bool(value: bool) -> *mut HeapObject {
564 let layout = Layout::new::<BoolObject>();
565 let ptr = unsafe { alloc(layout) as *mut BoolObject };
566 if ptr.is_null() {
567 panic!("Failed to allocate BoolObject");
568 }
569
570 unsafe {
571 (*ptr).header = HeapObject {
572 tag: HeapTag::Bool as u8,
573 flags: 0,
574 reserved: 0,
575 refcount: 1,
576 };
577 (*ptr).value = value;
578 }
579
580 ptr as *mut HeapObject
581}
582
583#[unsafe(no_mangle)]
585pub extern "C" fn seq_alloc_quotation(wrapper: usize, impl_ptr: usize) -> *mut HeapObject {
586 let layout = Layout::new::<QuotationObject>();
587 let ptr = unsafe { alloc(layout) as *mut QuotationObject };
588 if ptr.is_null() {
589 panic!("Failed to allocate QuotationObject");
590 }
591
592 unsafe {
593 (*ptr).header = HeapObject {
594 tag: HeapTag::Quotation as u8,
595 flags: 0,
596 reserved: 0,
597 refcount: 1,
598 };
599 (*ptr).wrapper = wrapper;
600 (*ptr).impl_ptr = impl_ptr;
601 }
602
603 ptr as *mut HeapObject
604}
605
/// FFI: deallocates a heap object, choosing the layout from its tag.
/// Null is accepted and ignored. No refcount check is done here — callers
/// (e.g. `seq_heap_decref`) decide when the object is dead.
///
/// NOTE(review): the fallback arm frees unknown tags (String, Variant, Map,
/// Closure) with `Layout::new::<HeapObject>()`. `dealloc` requires the same
/// layout the object was allocated with, so this is only sound if those
/// objects are never routed here (or are exactly header-sized) — verify
/// against wherever they are allocated.
///
/// # Safety
/// `obj` must be null or a live pointer from one of the `seq_alloc_*`
/// functions, not freed before.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_free_heap_object(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }

    unsafe {
        let tag = (*obj).tag;
        // Match guards rather than a HeapTag conversion: `tag` is a raw u8
        // and may hold values outside the enum.
        match tag {
            t if t == HeapTag::Float as u8 => {
                let layout = Layout::new::<FloatObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Bool as u8 => {
                let layout = Layout::new::<BoolObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Quotation as u8 => {
                let layout = Layout::new::<QuotationObject>();
                dealloc(obj as *mut u8, layout);
            }
            _ => {
                // See NOTE(review) above: header-sized layout assumed here.
                let layout = Layout::new::<HeapObject>();
                dealloc(obj as *mut u8, layout);
            }
        }
    }
}
642
/// FFI: atomically increments the refcount. Null and static objects are
/// no-ops (static objects are never freed, so their count is irrelevant).
///
/// Relaxed ordering suffices for an increment: it cannot race with the
/// final free, since the caller already owns one reference.
///
/// NOTE(review): the `&(*obj).refcount as *const u32 as *mut u32` cast
/// mutates through a pointer derived from a shared place; consider making
/// `refcount` an `AtomicU32` field to keep this unambiguously sound.
///
/// # Safety
/// `obj` must be null or a live heap object pointer.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_heap_incref(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }
    unsafe {
        if !(*obj).is_static() {
            let rc = &(*obj).refcount as *const u32 as *mut u32;
            std::sync::atomic::AtomicU32::from_ptr(rc)
                .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        }
    }
}
660
/// FFI: atomically decrements the refcount and frees the object when it
/// reaches zero. Null and static objects are no-ops.
///
/// AcqRel ordering on the decrement: the Release publishes this thread's
/// writes to the object, and the Acquire on the final decrement ensures
/// they are visible before `seq_free_heap_object` tears it down.
///
/// # Safety
/// `obj` must be null or a live heap object pointer; the caller gives up
/// its reference and must not use `obj` afterwards.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_heap_decref(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }
    unsafe {
        if !(*obj).is_static() {
            let rc = &(*obj).refcount as *const u32 as *mut u32;
            let old = std::sync::atomic::AtomicU32::from_ptr(rc)
                .fetch_sub(1, std::sync::atomic::Ordering::AcqRel);
            // We held the last reference: free the object.
            if old == 1 {
                seq_free_heap_object(obj);
            }
        }
    }
}
681
682#[unsafe(no_mangle)]
687pub unsafe extern "C" fn seq_get_float(obj: *mut HeapObject) -> f64 {
688 assert!(!obj.is_null(), "seq_get_float: null object");
689 unsafe {
690 assert!(
691 (*obj).tag == HeapTag::Float as u8,
692 "seq_get_float: not a float"
693 );
694 (*(obj as *mut FloatObject)).value
695 }
696}
697
698#[unsafe(no_mangle)]
703pub unsafe extern "C" fn seq_get_bool(obj: *mut HeapObject) -> bool {
704 assert!(!obj.is_null(), "seq_get_bool: null object");
705 unsafe {
706 assert!(
707 (*obj).tag == HeapTag::Bool as u8,
708 "seq_get_bool: not a bool"
709 );
710 (*(obj as *mut BoolObject)).value
711 }
712}
713
#[cfg(test)]
mod tests {
    use super::*;

    /// Integers round-trip through the low-bit tagging scheme.
    #[test]
    fn test_tag_untag_int() {
        // Non-negative values.
        assert_eq!(untag_int(tag_int(0)), 0);
        assert_eq!(untag_int(tag_int(1)), 1);
        assert_eq!(untag_int(tag_int(42)), 42);
        assert_eq!(untag_int(tag_int(1000000)), 1000000);

        // Negative values: the arithmetic right shift must restore the sign.
        assert_eq!(untag_int(tag_int(-1)), -1);
        assert_eq!(untag_int(tag_int(-42)), -42);
        assert_eq!(untag_int(tag_int(-1000000)), -1000000);

        // Extremes of the 63-bit representable range (one bit is the tag).
        let max_63bit = (1i64 << 62) - 1;
        let min_63bit = -(1i64 << 62);
        assert_eq!(untag_int(tag_int(max_63bit)), max_63bit);
        assert_eq!(untag_int(tag_int(min_63bit)), min_63bit);
    }

    /// The tag predicates split the value space on the low bit.
    #[test]
    fn test_is_tagged() {
        assert!(is_tagged_int(tag_int(0)));
        assert!(is_tagged_int(tag_int(42)));
        assert!(is_tagged_int(tag_int(-1)));

        // An aligned address has a clear low bit, so it reads as heap.
        let fake_ptr = 0x1000u64;
        assert!(is_tagged_heap(fake_ptr));
        assert!(!is_tagged_int(fake_ptr));
    }

    /// Pins the exact encoding: (n << 1) | 1.
    #[test]
    fn test_tagged_int_examples() {
        assert_eq!(tag_int(42), 85);
        assert_eq!(tag_int(42), 0x55);

        // -1 shifts to ...FE; the tag bit makes it all ones.
        assert_eq!(tag_int(-1), 0xFFFFFFFFFFFFFFFFu64);
    }

    /// Push/pop maintain LIFO order and depth bookkeeping.
    #[test]
    fn test_stack_basic_operations() {
        let mut stack = TaggedStack::new(16);

        assert!(stack.is_empty());
        assert_eq!(stack.depth(), 0);

        stack.push_int(10);
        stack.push_int(20);
        stack.push_int(30);

        assert!(!stack.is_empty());
        assert_eq!(stack.depth(), 3);

        // Popped in reverse push order.
        assert_eq!(stack.pop_int(), 30);
        assert_eq!(stack.pop_int(), 20);
        assert_eq!(stack.pop_int(), 10);

        assert!(stack.is_empty());
    }

    /// Peek returns the top value without removing it.
    #[test]
    fn test_stack_peek() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(42);

        let peeked = stack.peek();
        // Int discriminant in slot0, payload in slot1.
        assert_eq!(peeked.slot0, 0);
        assert_eq!(peeked.slot1 as i64, 42);
        // Still on the stack after peeking.
        assert_eq!(stack.depth(), 1);
        assert_eq!(stack.pop_int(), 42);
        assert!(stack.is_empty());
    }

    /// Pushing past the initial capacity grows the stack and keeps values.
    #[test]
    fn test_stack_grow() {
        let mut stack = TaggedStack::new(4);

        for i in 0..100 {
            stack.push_int(i);
        }

        assert_eq!(stack.depth(), 100);
        assert!(stack.capacity >= 100);

        for i in (0..100).rev() {
            assert_eq!(stack.pop_int(), i);
        }
    }

    /// A cloned stack holds independent copies of the same values.
    #[test]
    fn test_stack_clone() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(1);
        stack.push_int(2);
        stack.push_int(3);

        let mut cloned = stack.clone_stack();

        // Draining the clone...
        assert_eq!(cloned.pop_int(), 3);
        assert_eq!(cloned.pop_int(), 2);
        assert_eq!(cloned.pop_int(), 1);

        // ...leaves the original untouched.
        assert_eq!(stack.pop_int(), 3);
        assert_eq!(stack.pop_int(), 2);
        assert_eq!(stack.pop_int(), 1);
    }

    /// The FFI constructor/accessors/destructor round-trip.
    #[test]
    fn test_ffi_stack_new_free() {
        let stack = seq_stack_new(64);
        assert!(!stack.is_null());

        unsafe {
            assert_eq!(seq_stack_capacity(stack), 64);
            assert_eq!(seq_stack_sp(stack), 0);

            seq_stack_free(stack);
        }
    }

    /// Float objects carry the right tag, refcount, value, and alignment.
    #[test]
    fn test_float_object() {
        let obj = seq_alloc_float(2.5);
        assert!(!obj.is_null());

        unsafe {
            assert_eq!((*obj).tag, HeapTag::Float as u8);
            assert_eq!((*obj).refcount, 1);
            assert_eq!(seq_get_float(obj), 2.5);

            // 8-byte alignment is what tag_heap_ptr's debug_assert requires.
            assert!((obj as usize) & 0x7 == 0);

            seq_free_heap_object(obj);
        }
    }

    /// Bool objects preserve their payload.
    #[test]
    fn test_bool_object() {
        let obj_true = seq_alloc_bool(true);
        let obj_false = seq_alloc_bool(false);

        unsafe {
            assert!(seq_get_bool(obj_true));
            assert!(!seq_get_bool(obj_false));

            seq_free_heap_object(obj_true);
            seq_free_heap_object(obj_false);
        }
    }

    /// incref/decref adjust the count; the last decref frees the object.
    #[test]
    fn test_refcount() {
        let obj = seq_alloc_float(1.0);

        unsafe {
            assert_eq!((*obj).refcount, 1);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 3);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 1);

            // Final decref: count hits zero and the object is freed, so
            // `obj` must not be touched after this call.
            seq_heap_decref(obj);
        }
    }

    /// Mixed ints and heap references coexist on the stack.
    #[test]
    fn test_stack_with_heap_objects() {
        let mut stack = TaggedStack::new(16);

        stack.push_int(42);

        // Store the tagged pointer in slot0; pop must hand it back intact.
        let float_obj = seq_alloc_float(2.5);
        stack.push(StackValue {
            slot0: tag_heap_ptr(float_obj),
            slot1: 0,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        });

        stack.push_int(100);

        assert_eq!(stack.depth(), 3);

        assert_eq!(stack.pop_int(), 100);

        let val = stack.pop();
        assert!(is_tagged_heap(val.slot0));
        unsafe {
            assert_eq!(seq_get_float(untag_heap_ptr(val.slot0)), 2.5);
        }

        assert_eq!(stack.pop_int(), 42);

        // Freed manually: the value was popped, so the stack's Drop will
        // not see it.
        unsafe {
            seq_free_heap_object(float_obj);
        }
    }

    /// The 40-byte slot size the const assert pins at compile time.
    #[test]
    fn test_stack_value_size() {
        assert_eq!(std::mem::size_of::<StackValue>(), 40);
        assert_eq!(STACK_VALUE_SIZE, 40);
    }
}