// seq_runtime/tagged_stack.rs

1//! Tagged Stack Implementation
2//!
3//! A contiguous array of 40-byte values for high-performance stack operations.
4//! The 40-byte size matches Rust's `Value` enum with `#[repr(C)]` and the LLVM
5//! `%Value = type { i64, i64, i64, i64, i64 }` layout, enabling interoperability
6//! between inline IR and FFI operations.
7//!
8//! ## Stack Value Layout (40 bytes)
9//!
10//! ```text
11//! ┌─────────────────────────────────────────────────────────────────────────────┐
12//! │  slot0 (8 bytes)  │  slot1  │  slot2  │  slot3  │  slot4  │
13//! ├───────────────────┼─────────┼─────────┼─────────┼─────────┤
14//! │   Discriminant    │ Payload │  data   │  data   │  data   │
15//! └─────────────────────────────────────────────────────────────────────────────┘
16//!
17//! Value discriminants:
18//! - 0 = Int:   slot1 contains i64 value
19//! - 1 = Float: slot1 contains f64 bits
20//! - 2 = Bool:  slot1 contains 0 or 1
21//! - 3 = String, 4 = Variant, 5 = Map, 6 = Quotation, 7 = Closure
22//! ```
23//!
24//! ## Stack Layout
25//!
26//! ```text
27//! Stack: contiguous array of 40-byte StackValue slots
28//! ┌──────────┬──────────┬──────────┬──────────┬─────────┐
29//! │   v0     │   v1     │   v2     │   v3     │  ...    │
30//! │ (40 B)   │ (40 B)   │ (40 B)   │ (40 B)   │         │
31//! └──────────┴──────────┴──────────┴──────────┴─────────┘
32//!                                              ↑ SP
33//!
34//! - Grows upward
35//! - SP points to next free slot
36//! - Push: store at SP, increment SP
37//! - Pop: decrement SP, load from SP
38//! ```
39//!
40//! ## Performance Considerations
41//!
42//! The 40-byte size enables:
43//! - Direct compatibility with Rust's Value enum (#[repr(C)])
44//! - No conversion overhead when calling runtime functions
45//! - Simple inline integer/boolean operations in compiled code
46
47use std::alloc::{Layout, alloc, dealloc, realloc};
48
49// =============================================================================
50// StackValue - Conditional on nanbox feature
51// =============================================================================
52
/// A 40-byte stack value, layout-compatible with LLVM's %Value type.
///
/// This matches `%Value = type { i64, i64, i64, i64, i64 }` in the generated IR.
/// The size matches Rust's Value enum with #[repr(C)], so slots can be passed
/// across the FFI boundary without any conversion.
#[cfg(not(feature = "nanbox"))]
#[repr(C)]
#[derive(Clone, Copy, Default)]
pub struct StackValue {
    /// First slot: discriminant (0=Int, 1=Float, 2=Bool, 3=String, etc.)
    pub slot0: u64,
    /// Second slot: primary payload (i64 value for Int, bool for Bool, etc.)
    pub slot1: u64,
    /// Third slot: type-specific data
    pub slot2: u64,
    /// Fourth slot: type-specific data
    pub slot3: u64,
    /// Fifth slot: type-specific data (for largest variant)
    pub slot4: u64,
}
72
/// An 8-byte NaN-boxed stack value.
///
/// When the `nanbox` feature is enabled, all values are encoded in 8 bytes
/// using IEEE 754 NaN-boxing. This reduces memory usage by 5x and improves
/// cache utilization.
///
/// `#[repr(transparent)]` guarantees the same ABI as the inner
/// `NanBoxedValue`, so FFI code can treat it as a raw 64-bit word.
#[cfg(feature = "nanbox")]
#[repr(transparent)]
#[derive(Clone, Copy, Default)]
pub struct StackValue(pub crate::nanbox::NanBoxedValue);
82
83#[cfg(not(feature = "nanbox"))]
84impl std::fmt::Debug for StackValue {
85    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
86        // Discriminant 0 = Int
87        if self.slot0 == 0 {
88            write!(f, "Int({})", self.slot1 as i64)
89        } else if self.slot0 == 2 {
90            // Discriminant 2 = Bool
91            write!(f, "Bool({})", self.slot1 != 0)
92        } else {
93            write!(
94                f,
95                "StackValue {{ slot0: 0x{:x}, slot1: 0x{:x}, slot2: 0x{:x}, slot3: 0x{:x}, slot4: 0x{:x} }}",
96                self.slot0, self.slot1, self.slot2, self.slot3, self.slot4
97            )
98        }
99    }
100}
101
#[cfg(feature = "nanbox")]
impl std::fmt::Debug for StackValue {
    // Delegates to the inner NanBoxedValue's Debug implementation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "StackValue({:?})", self.0)
    }
}
108
/// Size of StackValue in bytes
/// - Without nanbox: 40 bytes (5 x u64)
/// - With nanbox: 8 bytes (NaN-boxed)
///
/// Generated code relies on this exact size for slot addressing.
pub const STACK_VALUE_SIZE: usize = std::mem::size_of::<StackValue>();

// Compile-time assertions for StackValue size: fail the build immediately
// if the layout ever drifts from what the LLVM codegen expects.
#[cfg(not(feature = "nanbox"))]
const _: () = assert!(
    STACK_VALUE_SIZE == 40,
    "StackValue must be 40 bytes without nanbox"
);

#[cfg(feature = "nanbox")]
const _: () = assert!(
    STACK_VALUE_SIZE == 8,
    "StackValue must be 8 bytes with nanbox"
);
126
/// Legacy type alias for backward compatibility
pub type TaggedValue = u64;

/// Mask selecting the tag bit (bit 0)
pub const TAG_MASK: u64 = 1;

/// Integer tag (low bit set)
pub const TAG_INT: u64 = 1;

/// Heap pointer tag (low bit clear)
pub const TAG_HEAP: u64 = 0;

/// Returns true when `val` carries the integer tag (low bit set).
#[inline(always)]
pub const fn is_tagged_int(val: TaggedValue) -> bool {
    (val & TAG_MASK) != TAG_HEAP
}

/// Returns true when `val` carries the heap-pointer tag (low bit clear).
///
/// With a one-bit tag, a value is a heap pointer exactly when it is not
/// an integer.
#[inline(always)]
pub const fn is_tagged_heap(val: TaggedValue) -> bool {
    !is_tagged_int(val)
}

/// Create a tagged integer from an i64.
///
/// Note: Only 63 bits of the integer are preserved.
/// Range: -4,611,686,018,427,387,904 to 4,611,686,018,427,387,903
#[inline(always)]
pub const fn tag_int(val: i64) -> TaggedValue {
    // Shift the payload up one bit to make room for the tag, then set it.
    // Casting before or after the shift yields the same bit pattern.
    ((val as u64) << 1) | TAG_INT
}

/// Extract an i64 from a tagged integer.
///
/// Caller must ensure `is_tagged_int(val)` is true.
#[inline(always)]
pub const fn untag_int(val: TaggedValue) -> i64 {
    // Reinterpret as signed first so the shift is arithmetic and the sign
    // bit is propagated back down.
    let signed = val as i64;
    signed >> 1
}
169
170/// Create a tagged heap pointer from a raw pointer
171///
172/// The pointer must be 8-byte aligned (low 3 bits clear).
173#[inline(always)]
174pub fn tag_heap_ptr(ptr: *mut HeapObject) -> TaggedValue {
175    debug_assert!(
176        (ptr as usize) & 0x7 == 0,
177        "HeapObject pointer must be 8-byte aligned"
178    );
179    ptr as TaggedValue
180}
181
182/// Extract a heap pointer from a tagged value
183///
184/// Caller must ensure `is_tagged_heap(val)` is true.
185#[inline(always)]
186pub fn untag_heap_ptr(val: TaggedValue) -> *mut HeapObject {
187    val as *mut HeapObject
188}
189
/// Heap object type tags
///
/// Values start at 1; the module-level docs reserve discriminant 0 for
/// inline Int values, which never live on the heap.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HeapTag {
    Float = 1,
    Bool = 2,
    String = 3,
    Variant = 4,
    Map = 5,
    Quotation = 6,
    Closure = 7,
}
202
/// Heap object header (8 bytes)
///
/// All heap-allocated values share this header for uniform access.
/// Layout: tag (1 B) | flags (1 B) | reserved (2 B) | refcount (4 B).
#[repr(C)]
pub struct HeapObject {
    /// Type tag identifying the payload type
    pub tag: u8,
    /// Flags (e.g., is_static for non-refcounted values)
    pub flags: u8,
    /// Reserved for future use
    pub reserved: u16,
    /// Reference count (atomic for thread safety)
    pub refcount: u32,
    // Payload follows (variable size based on tag)
}

/// Flags for HeapObject
pub mod heap_flags {
    /// Object is statically allocated, don't decrement refcount
    pub const IS_STATIC: u8 = 0x01;
}

impl HeapObject {
    /// Check if this object is statically allocated; refcount updates are
    /// skipped entirely for static objects.
    #[inline(always)]
    pub fn is_static(&self) -> bool {
        (self.flags & heap_flags::IS_STATIC) == heap_flags::IS_STATIC
    }
}
232
/// Float heap object: header followed by the f64 payload.
#[repr(C)]
pub struct FloatObject {
    pub header: HeapObject,
    pub value: f64,
}

/// Bool heap object: header followed by the bool payload.
#[repr(C)]
pub struct BoolObject {
    pub header: HeapObject,
    pub value: bool,
}

/// Quotation heap object (stateless function)
///
/// Function pointers are stored as `usize` so the struct stays FFI-plain;
/// callers transmute them back to the appropriate function type.
#[repr(C)]
pub struct QuotationObject {
    pub header: HeapObject,
    /// C-convention wrapper function pointer
    pub wrapper: usize,
    /// tailcc implementation function pointer
    pub impl_ptr: usize,
}
256
/// Default stack capacity (number of stack values)
pub const DEFAULT_STACK_CAPACITY: usize = 4096;

/// Stack state for the tagged value stack
///
/// This struct is passed to/from runtime functions and represents
/// the complete state of a strand's value stack.
///
/// Slots are `StackValue`s — 40 bytes by default, 8 bytes with the
/// `nanbox` feature — for FFI compatibility with the LLVM `%Value` layout.
#[repr(C)]
pub struct TaggedStack {
    /// Pointer to the base of the stack array (array of StackValue)
    pub base: *mut StackValue,
    /// Current stack pointer (index into array, points to next free slot)
    pub sp: usize,
    /// Total capacity of the stack (number of slots)
    pub capacity: usize,
}
275
276impl TaggedStack {
277    /// Create a new tagged stack with the given capacity
278    pub fn new(capacity: usize) -> Self {
279        let layout = Layout::array::<StackValue>(capacity).expect("stack layout overflow");
280        let base = unsafe { alloc(layout) as *mut StackValue };
281        if base.is_null() {
282            panic!("Failed to allocate tagged stack");
283        }
284
285        TaggedStack {
286            base,
287            sp: 0,
288            capacity,
289        }
290    }
291
292    /// Create a new tagged stack with default capacity
293    pub fn with_default_capacity() -> Self {
294        Self::new(DEFAULT_STACK_CAPACITY)
295    }
296
297    /// Get the current stack depth
298    #[inline(always)]
299    pub fn depth(&self) -> usize {
300        self.sp
301    }
302
303    /// Check if the stack is empty
304    #[inline(always)]
305    pub fn is_empty(&self) -> bool {
306        self.sp == 0
307    }
308
309    /// Check if the stack has room for `n` more values
310    #[inline(always)]
311    pub fn has_capacity(&self, n: usize) -> bool {
312        self.sp + n <= self.capacity
313    }
314
315    /// Grow the stack to accommodate more values
316    ///
317    /// Doubles capacity by default, or grows to `min_capacity` if larger.
318    pub fn grow(&mut self, min_capacity: usize) {
319        let new_capacity = (self.capacity * 2).max(min_capacity);
320        let old_layout = Layout::array::<StackValue>(self.capacity).expect("old layout overflow");
321        let new_layout = Layout::array::<StackValue>(new_capacity).expect("new layout overflow");
322
323        let new_base = unsafe {
324            realloc(self.base as *mut u8, old_layout, new_layout.size()) as *mut StackValue
325        };
326
327        if new_base.is_null() {
328            panic!(
329                "Failed to grow tagged stack from {} to {}",
330                self.capacity, new_capacity
331            );
332        }
333
334        self.base = new_base;
335        self.capacity = new_capacity;
336    }
337
338    /// Push a StackValue onto the stack
339    ///
340    /// Grows the stack if necessary.
341    #[inline]
342    pub fn push(&mut self, val: StackValue) {
343        if self.sp >= self.capacity {
344            self.grow(self.capacity + 1);
345        }
346        unsafe {
347            *self.base.add(self.sp) = val;
348        }
349        self.sp += 1;
350    }
351
352    /// Pop a StackValue from the stack
353    ///
354    /// Panics if the stack is empty.
355    #[inline]
356    pub fn pop(&mut self) -> StackValue {
357        assert!(self.sp > 0, "pop: stack is empty");
358        self.sp -= 1;
359        unsafe { *self.base.add(self.sp) }
360    }
361
362    /// Peek at the top value without removing it
363    ///
364    /// Panics if the stack is empty.
365    #[inline]
366    pub fn peek(&self) -> StackValue {
367        assert!(self.sp > 0, "peek: stack is empty");
368        unsafe { *self.base.add(self.sp - 1) }
369    }
370
371    /// Get a pointer to the current stack pointer position
372    ///
373    /// This is used by generated code for inline stack operations.
374    /// Returns pointer to next free StackValue slot.
375    #[inline(always)]
376    pub fn sp_ptr(&self) -> *mut StackValue {
377        unsafe { self.base.add(self.sp) }
378    }
379
380    /// Push an integer value using Value::Int layout
381    /// slot0 = 0 (Int discriminant), slot1 = value
382    #[cfg(not(feature = "nanbox"))]
383    #[inline]
384    pub fn push_int(&mut self, val: i64) {
385        self.push(StackValue {
386            slot0: 0, // Int discriminant
387            slot1: val as u64,
388            slot2: 0,
389            slot3: 0,
390            slot4: 0,
391        });
392    }
393
394    /// Push an integer value (NaN-boxed version)
395    #[cfg(feature = "nanbox")]
396    #[inline]
397    pub fn push_int(&mut self, val: i64) {
398        self.push(StackValue(crate::nanbox::NanBoxedValue::from_int(val)));
399    }
400
401    /// Pop and return an integer value
402    ///
403    /// Panics if the top value is not an integer.
404    #[cfg(not(feature = "nanbox"))]
405    #[inline]
406    pub fn pop_int(&mut self) -> i64 {
407        let val = self.pop();
408        assert!(
409            val.slot0 == 0,
410            "pop_int: expected Int (discriminant 0), got discriminant {}",
411            val.slot0
412        );
413        val.slot1 as i64
414    }
415
416    /// Pop and return an integer value (NaN-boxed version)
417    ///
418    /// Panics if the top value is not an integer.
419    #[cfg(feature = "nanbox")]
420    #[inline]
421    pub fn pop_int(&mut self) -> i64 {
422        let val = self.pop();
423        assert!(val.0.is_int(), "pop_int: expected Int");
424        val.0.as_int()
425    }
426
427    /// Clone this stack (for spawn)
428    ///
429    /// Creates a deep copy. For heap objects, properly clones them using
430    /// the clone_stack_value function which handles each type correctly.
431    pub fn clone_stack(&self) -> Self {
432        use crate::stack::clone_stack_value;
433
434        // Allocate new stack array
435        let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
436        let new_base = unsafe { alloc(layout) as *mut StackValue };
437        if new_base.is_null() {
438            panic!("Failed to allocate cloned stack");
439        }
440
441        // Clone each value properly (handles heap types correctly)
442        for i in 0..self.sp {
443            unsafe {
444                let sv = &*self.base.add(i);
445                let cloned = clone_stack_value(sv);
446                *new_base.add(i) = cloned;
447            }
448        }
449
450        TaggedStack {
451            base: new_base,
452            sp: self.sp,
453            capacity: self.capacity,
454        }
455    }
456}
457
impl Drop for TaggedStack {
    // Releases every live value, then the backing array itself.
    fn drop(&mut self) {
        use crate::stack::drop_stack_value;

        // Drop all live values properly (handles heap types correctly);
        // slots at index >= sp are uninitialized and must not be touched.
        for i in 0..self.sp {
            unsafe {
                let sv = *self.base.add(i);
                drop_stack_value(sv);
            }
        }

        // Free the stack array with the same layout it was allocated with.
        if !self.base.is_null() {
            let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
            unsafe {
                dealloc(self.base as *mut u8, layout);
            }
        }
    }
}
479
480// =============================================================================
481// FFI Functions for LLVM Codegen
482// =============================================================================
483
484/// Allocate a new tagged stack
485///
486/// Returns a pointer to a heap-allocated TaggedStack.
487/// The caller owns this memory and must call `seq_stack_free` when done.
488#[unsafe(no_mangle)]
489pub extern "C" fn seq_stack_new(capacity: usize) -> *mut TaggedStack {
490    let stack = Box::new(TaggedStack::new(capacity));
491    Box::into_raw(stack)
492}
493
494/// Allocate a new tagged stack with default capacity
495#[unsafe(no_mangle)]
496pub extern "C" fn seq_stack_new_default() -> *mut TaggedStack {
497    let stack = Box::new(TaggedStack::with_default_capacity());
498    Box::into_raw(stack)
499}
500
501/// Free a tagged stack
502///
503/// # Safety
504/// The pointer must have been returned by `seq_stack_new` or `seq_stack_new_default`.
505#[unsafe(no_mangle)]
506pub unsafe extern "C" fn seq_stack_free(stack: *mut TaggedStack) {
507    if !stack.is_null() {
508        unsafe {
509            drop(Box::from_raw(stack));
510        }
511    }
512}
513
514/// Grow a tagged stack to at least the given capacity
515///
516/// # Safety
517/// `stack` must be a valid pointer to a TaggedStack.
518#[unsafe(no_mangle)]
519pub unsafe extern "C" fn seq_stack_grow(stack: *mut TaggedStack, min_capacity: usize) {
520    assert!(!stack.is_null(), "seq_stack_grow: null stack");
521    unsafe {
522        (*stack).grow(min_capacity);
523    }
524}
525
/// Get the base pointer of a tagged stack
///
/// This is used by generated code to get the array base.
/// Returns a pointer to the first StackValue slot (STACK_VALUE_SIZE bytes
/// each: 40 bytes by default, 8 with the `nanbox` feature).
///
/// # Safety
/// `stack` must be a valid pointer to a TaggedStack.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_stack_base(stack: *mut TaggedStack) -> *mut StackValue {
    assert!(!stack.is_null(), "seq_stack_base: null stack");
    unsafe { (*stack).base }
}
538
539/// Get the current stack pointer (as index)
540///
541/// # Safety
542/// `stack` must be a valid pointer to a TaggedStack.
543#[unsafe(no_mangle)]
544pub unsafe extern "C" fn seq_stack_sp(stack: *mut TaggedStack) -> usize {
545    assert!(!stack.is_null(), "seq_stack_sp: null stack");
546    unsafe { (*stack).sp }
547}
548
549/// Set the current stack pointer (as index)
550///
551/// # Safety
552/// The new SP must be <= capacity.
553#[unsafe(no_mangle)]
554pub unsafe extern "C" fn seq_stack_set_sp(stack: *mut TaggedStack, new_sp: usize) {
555    assert!(!stack.is_null(), "seq_stack_set_sp: null stack");
556    unsafe {
557        assert!(
558            new_sp <= (*stack).capacity,
559            "seq_stack_set_sp: sp exceeds capacity"
560        );
561        (*stack).sp = new_sp;
562    }
563}
564
565/// Get the capacity of a tagged stack
566///
567/// # Safety
568/// `stack` must be a valid pointer to a TaggedStack.
569#[unsafe(no_mangle)]
570pub unsafe extern "C" fn seq_stack_capacity(stack: *mut TaggedStack) -> usize {
571    assert!(!stack.is_null(), "seq_stack_capacity: null stack");
572    unsafe { (*stack).capacity }
573}
574
575/// Clone a tagged stack (for spawn)
576///
577/// # Safety
578/// `stack` must be a valid pointer to a TaggedStack.
579#[unsafe(no_mangle)]
580pub unsafe extern "C" fn seq_stack_clone(stack: *mut TaggedStack) -> *mut TaggedStack {
581    assert!(!stack.is_null(), "seq_stack_clone: null stack");
582    let cloned = unsafe { (*stack).clone_stack() };
583    Box::into_raw(Box::new(cloned))
584}
585
586// =============================================================================
587// Heap Object Allocation Functions
588// =============================================================================
589
590/// Allocate a float heap object
591#[unsafe(no_mangle)]
592pub extern "C" fn seq_alloc_float(value: f64) -> *mut HeapObject {
593    let layout = Layout::new::<FloatObject>();
594    let ptr = unsafe { alloc(layout) as *mut FloatObject };
595    if ptr.is_null() {
596        panic!("Failed to allocate FloatObject");
597    }
598
599    unsafe {
600        (*ptr).header = HeapObject {
601            tag: HeapTag::Float as u8,
602            flags: 0,
603            reserved: 0,
604            refcount: 1,
605        };
606        (*ptr).value = value;
607    }
608
609    ptr as *mut HeapObject
610}
611
612/// Allocate a bool heap object
613#[unsafe(no_mangle)]
614pub extern "C" fn seq_alloc_bool(value: bool) -> *mut HeapObject {
615    let layout = Layout::new::<BoolObject>();
616    let ptr = unsafe { alloc(layout) as *mut BoolObject };
617    if ptr.is_null() {
618        panic!("Failed to allocate BoolObject");
619    }
620
621    unsafe {
622        (*ptr).header = HeapObject {
623            tag: HeapTag::Bool as u8,
624            flags: 0,
625            reserved: 0,
626            refcount: 1,
627        };
628        (*ptr).value = value;
629    }
630
631    ptr as *mut HeapObject
632}
633
634/// Allocate a quotation heap object
635#[unsafe(no_mangle)]
636pub extern "C" fn seq_alloc_quotation(wrapper: usize, impl_ptr: usize) -> *mut HeapObject {
637    let layout = Layout::new::<QuotationObject>();
638    let ptr = unsafe { alloc(layout) as *mut QuotationObject };
639    if ptr.is_null() {
640        panic!("Failed to allocate QuotationObject");
641    }
642
643    unsafe {
644        (*ptr).header = HeapObject {
645            tag: HeapTag::Quotation as u8,
646            flags: 0,
647            reserved: 0,
648            refcount: 1,
649        };
650        (*ptr).wrapper = wrapper;
651        (*ptr).impl_ptr = impl_ptr;
652    }
653
654    ptr as *mut HeapObject
655}
656
657/// Free a heap object based on its type tag
658///
659/// # Safety
660/// `obj` must be a valid pointer to a HeapObject that was allocated by seq_alloc_*.
661#[unsafe(no_mangle)]
662pub unsafe extern "C" fn seq_free_heap_object(obj: *mut HeapObject) {
663    if obj.is_null() {
664        return;
665    }
666
667    unsafe {
668        let tag = (*obj).tag;
669        match tag {
670            t if t == HeapTag::Float as u8 => {
671                let layout = Layout::new::<FloatObject>();
672                dealloc(obj as *mut u8, layout);
673            }
674            t if t == HeapTag::Bool as u8 => {
675                let layout = Layout::new::<BoolObject>();
676                dealloc(obj as *mut u8, layout);
677            }
678            t if t == HeapTag::Quotation as u8 => {
679                let layout = Layout::new::<QuotationObject>();
680                dealloc(obj as *mut u8, layout);
681            }
682            // Note: String, Variant, Map, and Closure are stored inline in the 40-byte Value
683            // enum in non-nanbox mode, not as separate heap objects. Only types explicitly
684            // allocated via seq_alloc_* need handling here.
685            _ => {
686                // Unknown type, use minimum HeapObject size as fallback
687                let layout = Layout::new::<HeapObject>();
688                dealloc(obj as *mut u8, layout);
689            }
690        }
691    }
692}
693
/// Increment the reference count of a heap object
///
/// No-op for null pointers and statically allocated objects.
///
/// # Safety
/// `obj` must be a valid pointer to a HeapObject.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_heap_incref(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }
    unsafe {
        if !(*obj).is_static() {
            // View the plain u32 refcount field as an AtomicU32 in place.
            // Relaxed ordering suffices for increments: taking a new
            // reference needs no synchronization with other memory.
            let rc = &(*obj).refcount as *const u32 as *mut u32;
            std::sync::atomic::AtomicU32::from_ptr(rc)
                .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        }
    }
}
711
/// Decrement the reference count of a heap object, freeing if zero
///
/// No-op for null pointers and statically allocated objects.
///
/// # Safety
/// `obj` must be a valid pointer to a HeapObject.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_heap_decref(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }
    unsafe {
        if !(*obj).is_static() {
            // AcqRel: the release makes this thread's writes visible before
            // the count drops; the acquire ensures the freeing thread sees
            // all other threads' writes before deallocating.
            let rc = &(*obj).refcount as *const u32 as *mut u32;
            let old = std::sync::atomic::AtomicU32::from_ptr(rc)
                .fetch_sub(1, std::sync::atomic::Ordering::AcqRel);
            // fetch_sub returns the previous value; 1 means we dropped the
            // last reference and own the free.
            if old == 1 {
                seq_free_heap_object(obj);
            }
        }
    }
}
732
733/// Get the float value from a FloatObject
734///
735/// # Safety
736/// `obj` must be a valid pointer to a FloatObject.
737#[unsafe(no_mangle)]
738pub unsafe extern "C" fn seq_get_float(obj: *mut HeapObject) -> f64 {
739    assert!(!obj.is_null(), "seq_get_float: null object");
740    unsafe {
741        assert!(
742            (*obj).tag == HeapTag::Float as u8,
743            "seq_get_float: not a float"
744        );
745        (*(obj as *mut FloatObject)).value
746    }
747}
748
749/// Get the bool value from a BoolObject
750///
751/// # Safety
752/// `obj` must be a valid pointer to a BoolObject.
753#[unsafe(no_mangle)]
754pub unsafe extern "C" fn seq_get_bool(obj: *mut HeapObject) -> bool {
755    assert!(!obj.is_null(), "seq_get_bool: null object");
756    unsafe {
757        assert!(
758            (*obj).tag == HeapTag::Bool as u8,
759            "seq_get_bool: not a bool"
760        );
761        (*(obj as *mut BoolObject)).value
762    }
763}
764
765// =============================================================================
766// Tests
767// =============================================================================
768
// Unit tests covering the tagging primitives, stack operations, heap object
// allocation/refcounting, and the FFI entry points.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_tag_untag_int() {
        // Positive integers
        assert_eq!(untag_int(tag_int(0)), 0);
        assert_eq!(untag_int(tag_int(1)), 1);
        assert_eq!(untag_int(tag_int(42)), 42);
        assert_eq!(untag_int(tag_int(1000000)), 1000000);

        // Negative integers
        assert_eq!(untag_int(tag_int(-1)), -1);
        assert_eq!(untag_int(tag_int(-42)), -42);
        assert_eq!(untag_int(tag_int(-1000000)), -1000000);

        // Edge cases (63-bit range)
        let max_63bit = (1i64 << 62) - 1;
        let min_63bit = -(1i64 << 62);
        assert_eq!(untag_int(tag_int(max_63bit)), max_63bit);
        assert_eq!(untag_int(tag_int(min_63bit)), min_63bit);
    }

    #[test]
    fn test_is_tagged() {
        // Integers have low bit set
        assert!(is_tagged_int(tag_int(0)));
        assert!(is_tagged_int(tag_int(42)));
        assert!(is_tagged_int(tag_int(-1)));

        // Pointers have low bit clear (use a fake aligned address)
        let fake_ptr = 0x1000u64; // 8-byte aligned
        assert!(is_tagged_heap(fake_ptr));
        assert!(!is_tagged_int(fake_ptr));
    }

    #[test]
    fn test_tagged_int_examples() {
        // From design doc: Integer `42` → `0x55` (42 << 1 | 1 = 85)
        assert_eq!(tag_int(42), 85);
        assert_eq!(tag_int(42), 0x55);

        // Integer `-1` → `0xFFFFFFFFFFFFFFFF` (-1 << 1 | 1)
        assert_eq!(tag_int(-1), 0xFFFFFFFFFFFFFFFFu64);
    }

    #[test]
    fn test_stack_basic_operations() {
        let mut stack = TaggedStack::new(16);

        assert!(stack.is_empty());
        assert_eq!(stack.depth(), 0);

        // Push integers
        stack.push_int(10);
        stack.push_int(20);
        stack.push_int(30);

        assert!(!stack.is_empty());
        assert_eq!(stack.depth(), 3);

        // Pop and verify (LIFO order)
        assert_eq!(stack.pop_int(), 30);
        assert_eq!(stack.pop_int(), 20);
        assert_eq!(stack.pop_int(), 10);

        assert!(stack.is_empty());
    }

    #[test]
    #[cfg(not(feature = "nanbox"))]
    fn test_stack_peek() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(42);

        // With Value layout: slot0 = discriminant (0 for Int), slot1 = value
        let peeked = stack.peek();
        assert_eq!(peeked.slot0, 0); // Int discriminant
        assert_eq!(peeked.slot1 as i64, 42); // Value in slot1
        assert_eq!(stack.depth(), 1); // Still there

        assert_eq!(stack.pop_int(), 42);
        assert!(stack.is_empty());
    }

    #[test]
    #[cfg(feature = "nanbox")]
    fn test_stack_peek_nanbox() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(42);

        // With NaN-boxing: verify the value via is_int/as_int
        let peeked = stack.peek();
        assert!(peeked.0.is_int());
        assert_eq!(peeked.0.as_int(), 42);
        assert_eq!(stack.depth(), 1); // Still there

        assert_eq!(stack.pop_int(), 42);
        assert!(stack.is_empty());
    }

    #[test]
    fn test_stack_grow() {
        let mut stack = TaggedStack::new(4);

        // Fill beyond initial capacity to force reallocation
        for i in 0..100 {
            stack.push_int(i);
        }

        assert_eq!(stack.depth(), 100);
        assert!(stack.capacity >= 100);

        // Verify all values survived the realloc(s)
        for i in (0..100).rev() {
            assert_eq!(stack.pop_int(), i);
        }
    }

    #[test]
    fn test_stack_clone() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(1);
        stack.push_int(2);
        stack.push_int(3);

        let mut cloned = stack.clone_stack();

        // Both should have same values
        assert_eq!(cloned.pop_int(), 3);
        assert_eq!(cloned.pop_int(), 2);
        assert_eq!(cloned.pop_int(), 1);

        // Original should be unchanged (deep copy, not shared storage)
        assert_eq!(stack.pop_int(), 3);
        assert_eq!(stack.pop_int(), 2);
        assert_eq!(stack.pop_int(), 1);
    }

    #[test]
    fn test_ffi_stack_new_free() {
        let stack = seq_stack_new(64);
        assert!(!stack.is_null());

        unsafe {
            assert_eq!(seq_stack_capacity(stack), 64);
            assert_eq!(seq_stack_sp(stack), 0);

            seq_stack_free(stack);
        }
    }

    #[test]
    fn test_float_object() {
        let obj = seq_alloc_float(2.5);
        assert!(!obj.is_null());

        unsafe {
            assert_eq!((*obj).tag, HeapTag::Float as u8);
            assert_eq!((*obj).refcount, 1);
            assert_eq!(seq_get_float(obj), 2.5);

            // Verify it's 8-byte aligned (required by tag_heap_ptr)
            assert!((obj as usize) & 0x7 == 0);

            seq_free_heap_object(obj);
        }
    }

    #[test]
    fn test_bool_object() {
        let obj_true = seq_alloc_bool(true);
        let obj_false = seq_alloc_bool(false);

        unsafe {
            assert!(seq_get_bool(obj_true));
            assert!(!seq_get_bool(obj_false));

            seq_free_heap_object(obj_true);
            seq_free_heap_object(obj_false);
        }
    }

    #[test]
    fn test_refcount() {
        let obj = seq_alloc_float(1.0);

        unsafe {
            assert_eq!((*obj).refcount, 1);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 3);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 1);

            // Final decref should free
            seq_heap_decref(obj);
            // Can't check after free, but shouldn't crash
        }
    }

    #[test]
    #[cfg(not(feature = "nanbox"))]
    fn test_stack_with_heap_objects() {
        let mut stack = TaggedStack::new(16);

        // Push an integer
        stack.push_int(42);

        // Push a float (heap object) as a tagged pointer in slot0
        let float_obj = seq_alloc_float(2.5);
        stack.push(StackValue {
            slot0: tag_heap_ptr(float_obj),
            slot1: 0,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        });

        // Push another integer
        stack.push_int(100);

        assert_eq!(stack.depth(), 3);

        // Pop and verify
        assert_eq!(stack.pop_int(), 100);

        let val = stack.pop();
        assert!(is_tagged_heap(val.slot0));
        unsafe {
            assert_eq!(seq_get_float(untag_heap_ptr(val.slot0)), 2.5);
        }

        assert_eq!(stack.pop_int(), 42);

        // Note: float_obj refcount was 1, we popped it without decref,
        // so it's still alive. In real code, drop would decref.
        unsafe {
            seq_free_heap_object(float_obj);
        }
    }

    #[test]
    #[cfg(not(feature = "nanbox"))]
    fn test_stack_value_size() {
        // Verify StackValue is 40 bytes, matching LLVM's %Value type
        // (5 x i64 = 5 x 8 = 40 bytes, compatible with Rust's Value with #[repr(C)])
        assert_eq!(std::mem::size_of::<StackValue>(), 40);
        assert_eq!(STACK_VALUE_SIZE, 40);
    }

    #[test]
    #[cfg(feature = "nanbox")]
    fn test_stack_value_size_nanbox() {
        // With nanbox feature, StackValue is 8 bytes (NaN-boxed)
        assert_eq!(std::mem::size_of::<StackValue>(), 8);
        assert_eq!(STACK_VALUE_SIZE, 8);
    }
}