seq_runtime/
tagged_stack.rs

1//! Tagged Stack Implementation
2//!
3//! A contiguous array of 40-byte values for high-performance stack operations.
4//! The 40-byte size matches Rust's `Value` enum with `#[repr(C)]` and the LLVM
5//! `%Value = type { i64, i64, i64, i64, i64 }` layout, enabling interoperability
6//! between inline IR and FFI operations.
7//!
8//! ## Stack Value Layout (40 bytes)
9//!
10//! ```text
11//! ┌─────────────────────────────────────────────────────────────────────────────┐
12//! │  slot0 (8 bytes)  │  slot1  │  slot2  │  slot3  │  slot4  │
13//! ├───────────────────┼─────────┼─────────┼─────────┼─────────┤
14//! │   Discriminant    │ Payload │  data   │  data   │  data   │
15//! └─────────────────────────────────────────────────────────────────────────────┘
16//!
17//! Value discriminants:
18//! - 0 = Int:   slot1 contains i64 value
19//! - 1 = Float: slot1 contains f64 bits
20//! - 2 = Bool:  slot1 contains 0 or 1
21//! - 3 = String, 4 = Variant, 5 = Map, 6 = Quotation, 7 = Closure
22//! ```
23//!
24//! ## Stack Layout
25//!
26//! ```text
27//! Stack: contiguous array of 40-byte StackValue slots
28//! ┌──────────┬──────────┬──────────┬──────────┬─────────┐
29//! │   v0     │   v1     │   v2     │   v3     │  ...    │
30//! │ (40 B)   │ (40 B)   │ (40 B)   │ (40 B)   │         │
31//! └──────────┴──────────┴──────────┴──────────┴─────────┘
32//!                                              ↑ SP
33//!
34//! - Grows upward
35//! - SP points to next free slot
36//! - Push: store at SP, increment SP
37//! - Pop: decrement SP, load from SP
38//! ```
39//!
40//! ## Performance Considerations
41//!
42//! The 40-byte size enables:
43//! - Direct compatibility with Rust's Value enum (#[repr(C)])
44//! - No conversion overhead when calling runtime functions
45//! - Simple inline integer/boolean operations in compiled code
46
47use std::alloc::{Layout, alloc, dealloc, realloc};
48
/// A 40-byte stack value, layout-compatible with LLVM's %Value type.
///
/// This matches `%Value = type { i64, i64, i64, i64, i64 }` in the generated IR.
/// The size matches Rust's Value enum with #[repr(C)].
#[repr(C)]
#[derive(Clone, Copy, Default)]
pub struct StackValue {
    /// First slot: discriminant (0=Int, 1=Float, 2=Bool, 3=String, etc.)
    pub slot0: u64,
    /// Second slot: primary payload (i64 value for Int, f64 bits for Float, 0/1 for Bool)
    pub slot1: u64,
    /// Third slot: type-specific data
    pub slot2: u64,
    /// Fourth slot: type-specific data
    pub slot3: u64,
    /// Fifth slot: type-specific data (for largest variant)
    pub slot4: u64,
}

impl std::fmt::Debug for StackValue {
    /// Render the inline primitive variants readably; fall back to a raw
    /// hex dump of all five slots for heap-backed or unknown discriminants.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.slot0 {
            // Discriminant 0 = Int: slot1 holds the i64 payload.
            0 => write!(f, "Int({})", self.slot1 as i64),
            // Discriminant 1 = Float: slot1 holds the f64 bit pattern
            // (see the module-level layout docs).
            1 => write!(f, "Float({})", f64::from_bits(self.slot1)),
            // Discriminant 2 = Bool: slot1 holds 0 or 1.
            2 => write!(f, "Bool({})", self.slot1 != 0),
            _ => write!(
                f,
                "StackValue {{ slot0: 0x{:x}, slot1: 0x{:x}, slot2: 0x{:x}, slot3: 0x{:x}, slot4: 0x{:x} }}",
                self.slot0, self.slot1, self.slot2, self.slot3, self.slot4
            ),
        }
    }
}
85
/// Size of StackValue in bytes (should be 40)
pub const STACK_VALUE_SIZE: usize = std::mem::size_of::<StackValue>();

// Compile-time assertion that StackValue is 40 bytes, matching the LLVM
// `%Value = type { i64 x 5 }` layout; a mismatch would silently corrupt
// every value crossing the FFI boundary.
const _: () = assert!(STACK_VALUE_SIZE == 40, "StackValue must be 40 bytes");
91
/// Legacy type alias for backward compatibility
pub type TaggedValue = u64;

/// Tag bit mask
pub const TAG_MASK: u64 = 1;

/// Integer tag (low bit set)
pub const TAG_INT: u64 = 1;

/// Heap pointer tag (low bit clear)
pub const TAG_HEAP: u64 = 0;

/// Returns true when the low tag bit marks `val` as an inline integer.
#[inline(always)]
pub const fn is_tagged_int(val: TaggedValue) -> bool {
    val & TAG_MASK != TAG_HEAP
}

/// Returns true when the low tag bit marks `val` as a heap pointer.
#[inline(always)]
pub const fn is_tagged_heap(val: TaggedValue) -> bool {
    val & TAG_MASK != TAG_INT
}

/// Encode an i64 as a tagged integer.
///
/// The payload is shifted left one bit and the tag bit is set, so only
/// 63 bits survive.
/// Range: -4,611,686,018,427,387,904 to 4,611,686,018,427,387,903
#[inline(always)]
pub const fn tag_int(val: i64) -> TaggedValue {
    // Cast first, then shift: bit-identical to shifting the i64 itself.
    ((val as u64) << 1) | TAG_INT
}

/// Decode the i64 payload of a tagged integer.
///
/// Caller must ensure `is_tagged_int(val)` is true.
#[inline(always)]
pub const fn untag_int(val: TaggedValue) -> i64 {
    // Arithmetic (sign-extending) right shift restores negative payloads.
    (val as i64) >> 1
}
134
135/// Create a tagged heap pointer from a raw pointer
136///
137/// The pointer must be 8-byte aligned (low 3 bits clear).
138#[inline(always)]
139pub fn tag_heap_ptr(ptr: *mut HeapObject) -> TaggedValue {
140    debug_assert!(
141        (ptr as usize) & 0x7 == 0,
142        "HeapObject pointer must be 8-byte aligned"
143    );
144    ptr as TaggedValue
145}
146
147/// Extract a heap pointer from a tagged value
148///
149/// Caller must ensure `is_tagged_heap(val)` is true.
150#[inline(always)]
151pub fn untag_heap_ptr(val: TaggedValue) -> *mut HeapObject {
152    val as *mut HeapObject
153}
154
/// Heap object type tags
///
/// Stored in `HeapObject::tag`. The numeric values line up with the
/// discriminant list in the module-level docs (1=Float … 7=Closure);
/// 0 is reserved for inline Int and never appears on a heap object.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HeapTag {
    Float = 1,
    Bool = 2,
    String = 3,
    Variant = 4,
    Map = 5,
    Quotation = 6,
    Closure = 7,
}
167
/// Heap object header (8 bytes)
///
/// All heap-allocated values share this header for uniform access.
#[repr(C)]
pub struct HeapObject {
    /// Type tag identifying the payload type (a `HeapTag` value)
    pub tag: u8,
    /// Flags (e.g., is_static for non-refcounted values)
    pub flags: u8,
    /// Reserved for future use
    pub reserved: u16,
    /// Reference count. Declared as a plain u32 for FFI layout, but it is
    /// read/written atomically via `AtomicU32::from_ptr` in
    /// `seq_heap_incref`/`seq_heap_decref`.
    pub refcount: u32,
    // Payload follows (variable size based on tag)
}

/// Flags for HeapObject
pub mod heap_flags {
    /// Object is statically allocated, don't decrement refcount
    pub const IS_STATIC: u8 = 0x01;
}

impl HeapObject {
    /// Check if this object is statically allocated
    /// (static objects are never refcounted or freed).
    #[inline(always)]
    pub fn is_static(&self) -> bool {
        self.flags & heap_flags::IS_STATIC != 0
    }
}
197
/// Float heap object
///
/// Header followed immediately by the f64 payload (`#[repr(C)]` keeps
/// this layout stable for the generated IR).
#[repr(C)]
pub struct FloatObject {
    pub header: HeapObject,
    pub value: f64,
}

/// Bool heap object
#[repr(C)]
pub struct BoolObject {
    pub header: HeapObject,
    pub value: bool,
}

/// Quotation heap object (stateless function)
#[repr(C)]
pub struct QuotationObject {
    pub header: HeapObject,
    /// C-convention wrapper function pointer
    pub wrapper: usize,
    /// tailcc implementation function pointer
    pub impl_ptr: usize,
}
221
/// Default stack capacity (number of stack values)
pub const DEFAULT_STACK_CAPACITY: usize = 4096;

/// Stack state for the tagged value stack
///
/// This struct is passed to/from runtime functions and represents
/// the complete state of a strand's value stack.
///
/// Uses 40-byte StackValue slots (see `STACK_VALUE_SIZE`) for FFI
/// compatibility with the LLVM `%Value` type.
#[repr(C)]
pub struct TaggedStack {
    /// Pointer to the base of the stack array (array of StackValue)
    pub base: *mut StackValue,
    /// Current stack pointer (index into array, points to next free slot)
    pub sp: usize,
    /// Total capacity of the stack (number of slots)
    pub capacity: usize,
}
240
241impl TaggedStack {
242    /// Create a new tagged stack with the given capacity
243    pub fn new(capacity: usize) -> Self {
244        let layout = Layout::array::<StackValue>(capacity).expect("stack layout overflow");
245        let base = unsafe { alloc(layout) as *mut StackValue };
246        if base.is_null() {
247            panic!("Failed to allocate tagged stack");
248        }
249
250        TaggedStack {
251            base,
252            sp: 0,
253            capacity,
254        }
255    }
256
257    /// Create a new tagged stack with default capacity
258    pub fn with_default_capacity() -> Self {
259        Self::new(DEFAULT_STACK_CAPACITY)
260    }
261
262    /// Get the current stack depth
263    #[inline(always)]
264    pub fn depth(&self) -> usize {
265        self.sp
266    }
267
268    /// Check if the stack is empty
269    #[inline(always)]
270    pub fn is_empty(&self) -> bool {
271        self.sp == 0
272    }
273
274    /// Check if the stack has room for `n` more values
275    #[inline(always)]
276    pub fn has_capacity(&self, n: usize) -> bool {
277        self.sp + n <= self.capacity
278    }
279
280    /// Grow the stack to accommodate more values
281    ///
282    /// Doubles capacity by default, or grows to `min_capacity` if larger.
283    pub fn grow(&mut self, min_capacity: usize) {
284        let new_capacity = (self.capacity * 2).max(min_capacity);
285        let old_layout = Layout::array::<StackValue>(self.capacity).expect("old layout overflow");
286        let new_layout = Layout::array::<StackValue>(new_capacity).expect("new layout overflow");
287
288        let new_base = unsafe {
289            realloc(self.base as *mut u8, old_layout, new_layout.size()) as *mut StackValue
290        };
291
292        if new_base.is_null() {
293            panic!(
294                "Failed to grow tagged stack from {} to {}",
295                self.capacity, new_capacity
296            );
297        }
298
299        self.base = new_base;
300        self.capacity = new_capacity;
301    }
302
303    /// Push a StackValue onto the stack
304    ///
305    /// Grows the stack if necessary.
306    #[inline]
307    pub fn push(&mut self, val: StackValue) {
308        if self.sp >= self.capacity {
309            self.grow(self.capacity + 1);
310        }
311        unsafe {
312            *self.base.add(self.sp) = val;
313        }
314        self.sp += 1;
315    }
316
317    /// Pop a StackValue from the stack
318    ///
319    /// Panics if the stack is empty.
320    #[inline]
321    pub fn pop(&mut self) -> StackValue {
322        assert!(self.sp > 0, "pop: stack is empty");
323        self.sp -= 1;
324        unsafe { *self.base.add(self.sp) }
325    }
326
327    /// Peek at the top value without removing it
328    ///
329    /// Panics if the stack is empty.
330    #[inline]
331    pub fn peek(&self) -> StackValue {
332        assert!(self.sp > 0, "peek: stack is empty");
333        unsafe { *self.base.add(self.sp - 1) }
334    }
335
336    /// Get a pointer to the current stack pointer position
337    ///
338    /// This is used by generated code for inline stack operations.
339    /// Returns pointer to next free StackValue slot.
340    #[inline(always)]
341    pub fn sp_ptr(&self) -> *mut StackValue {
342        unsafe { self.base.add(self.sp) }
343    }
344
345    /// Push an integer value using Value::Int layout
346    /// slot0 = 0 (Int discriminant), slot1 = value
347    #[inline]
348    pub fn push_int(&mut self, val: i64) {
349        self.push(StackValue {
350            slot0: 0, // Int discriminant
351            slot1: val as u64,
352            slot2: 0,
353            slot3: 0,
354            slot4: 0,
355        });
356    }
357
358    /// Pop and return an integer value
359    ///
360    /// Panics if the top value is not an integer.
361    #[inline]
362    pub fn pop_int(&mut self) -> i64 {
363        let val = self.pop();
364        assert!(
365            val.slot0 == 0,
366            "pop_int: expected Int (discriminant 0), got discriminant {}",
367            val.slot0
368        );
369        val.slot1 as i64
370    }
371
372    /// Clone this stack (for spawn)
373    ///
374    /// Creates a deep copy. For heap objects, properly clones them using
375    /// the clone_stack_value function which handles each type correctly.
376    pub fn clone_stack(&self) -> Self {
377        use crate::stack::clone_stack_value;
378
379        // Allocate new stack array
380        let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
381        let new_base = unsafe { alloc(layout) as *mut StackValue };
382        if new_base.is_null() {
383            panic!("Failed to allocate cloned stack");
384        }
385
386        // Clone each value properly (handles heap types correctly)
387        for i in 0..self.sp {
388            unsafe {
389                let sv = &*self.base.add(i);
390                let cloned = clone_stack_value(sv);
391                *new_base.add(i) = cloned;
392            }
393        }
394
395        TaggedStack {
396            base: new_base,
397            sp: self.sp,
398            capacity: self.capacity,
399        }
400    }
401}
402
403impl Drop for TaggedStack {
404    fn drop(&mut self) {
405        use crate::stack::drop_stack_value;
406
407        // Drop all values properly (handles heap types correctly)
408        for i in 0..self.sp {
409            unsafe {
410                let sv = *self.base.add(i);
411                drop_stack_value(sv);
412            }
413        }
414
415        // Free the stack array
416        if !self.base.is_null() {
417            let layout = Layout::array::<StackValue>(self.capacity).expect("layout overflow");
418            unsafe {
419                dealloc(self.base as *mut u8, layout);
420            }
421        }
422    }
423}
424
425// =============================================================================
426// FFI Functions for LLVM Codegen
427// =============================================================================
428
429/// Allocate a new tagged stack
430///
431/// Returns a pointer to a heap-allocated TaggedStack.
432/// The caller owns this memory and must call `seq_stack_free` when done.
433#[unsafe(no_mangle)]
434pub extern "C" fn seq_stack_new(capacity: usize) -> *mut TaggedStack {
435    let stack = Box::new(TaggedStack::new(capacity));
436    Box::into_raw(stack)
437}
438
439/// Allocate a new tagged stack with default capacity
440#[unsafe(no_mangle)]
441pub extern "C" fn seq_stack_new_default() -> *mut TaggedStack {
442    let stack = Box::new(TaggedStack::with_default_capacity());
443    Box::into_raw(stack)
444}
445
446/// Free a tagged stack
447///
448/// # Safety
449/// The pointer must have been returned by `seq_stack_new` or `seq_stack_new_default`.
450#[unsafe(no_mangle)]
451pub unsafe extern "C" fn seq_stack_free(stack: *mut TaggedStack) {
452    if !stack.is_null() {
453        unsafe {
454            drop(Box::from_raw(stack));
455        }
456    }
457}
458
459/// Grow a tagged stack to at least the given capacity
460///
461/// # Safety
462/// `stack` must be a valid pointer to a TaggedStack.
463#[unsafe(no_mangle)]
464pub unsafe extern "C" fn seq_stack_grow(stack: *mut TaggedStack, min_capacity: usize) {
465    assert!(!stack.is_null(), "seq_stack_grow: null stack");
466    unsafe {
467        (*stack).grow(min_capacity);
468    }
469}
470
471/// Get the base pointer of a tagged stack
472///
473/// This is used by generated code to get the array base.
474/// Returns a pointer to the first StackValue slot (32 bytes each).
475///
476/// # Safety
477/// `stack` must be a valid pointer to a TaggedStack.
478#[unsafe(no_mangle)]
479pub unsafe extern "C" fn seq_stack_base(stack: *mut TaggedStack) -> *mut StackValue {
480    assert!(!stack.is_null(), "seq_stack_base: null stack");
481    unsafe { (*stack).base }
482}
483
484/// Get the current stack pointer (as index)
485///
486/// # Safety
487/// `stack` must be a valid pointer to a TaggedStack.
488#[unsafe(no_mangle)]
489pub unsafe extern "C" fn seq_stack_sp(stack: *mut TaggedStack) -> usize {
490    assert!(!stack.is_null(), "seq_stack_sp: null stack");
491    unsafe { (*stack).sp }
492}
493
494/// Set the current stack pointer (as index)
495///
496/// # Safety
497/// The new SP must be <= capacity.
498#[unsafe(no_mangle)]
499pub unsafe extern "C" fn seq_stack_set_sp(stack: *mut TaggedStack, new_sp: usize) {
500    assert!(!stack.is_null(), "seq_stack_set_sp: null stack");
501    unsafe {
502        assert!(
503            new_sp <= (*stack).capacity,
504            "seq_stack_set_sp: sp exceeds capacity"
505        );
506        (*stack).sp = new_sp;
507    }
508}
509
510/// Get the capacity of a tagged stack
511///
512/// # Safety
513/// `stack` must be a valid pointer to a TaggedStack.
514#[unsafe(no_mangle)]
515pub unsafe extern "C" fn seq_stack_capacity(stack: *mut TaggedStack) -> usize {
516    assert!(!stack.is_null(), "seq_stack_capacity: null stack");
517    unsafe { (*stack).capacity }
518}
519
520/// Clone a tagged stack (for spawn)
521///
522/// # Safety
523/// `stack` must be a valid pointer to a TaggedStack.
524#[unsafe(no_mangle)]
525pub unsafe extern "C" fn seq_stack_clone(stack: *mut TaggedStack) -> *mut TaggedStack {
526    assert!(!stack.is_null(), "seq_stack_clone: null stack");
527    let cloned = unsafe { (*stack).clone_stack() };
528    Box::into_raw(Box::new(cloned))
529}
530
531// =============================================================================
532// Heap Object Allocation Functions
533// =============================================================================
534
535/// Allocate a float heap object
536#[unsafe(no_mangle)]
537pub extern "C" fn seq_alloc_float(value: f64) -> *mut HeapObject {
538    let layout = Layout::new::<FloatObject>();
539    let ptr = unsafe { alloc(layout) as *mut FloatObject };
540    if ptr.is_null() {
541        panic!("Failed to allocate FloatObject");
542    }
543
544    unsafe {
545        (*ptr).header = HeapObject {
546            tag: HeapTag::Float as u8,
547            flags: 0,
548            reserved: 0,
549            refcount: 1,
550        };
551        (*ptr).value = value;
552    }
553
554    ptr as *mut HeapObject
555}
556
557/// Allocate a bool heap object
558#[unsafe(no_mangle)]
559pub extern "C" fn seq_alloc_bool(value: bool) -> *mut HeapObject {
560    let layout = Layout::new::<BoolObject>();
561    let ptr = unsafe { alloc(layout) as *mut BoolObject };
562    if ptr.is_null() {
563        panic!("Failed to allocate BoolObject");
564    }
565
566    unsafe {
567        (*ptr).header = HeapObject {
568            tag: HeapTag::Bool as u8,
569            flags: 0,
570            reserved: 0,
571            refcount: 1,
572        };
573        (*ptr).value = value;
574    }
575
576    ptr as *mut HeapObject
577}
578
579/// Allocate a quotation heap object
580#[unsafe(no_mangle)]
581pub extern "C" fn seq_alloc_quotation(wrapper: usize, impl_ptr: usize) -> *mut HeapObject {
582    let layout = Layout::new::<QuotationObject>();
583    let ptr = unsafe { alloc(layout) as *mut QuotationObject };
584    if ptr.is_null() {
585        panic!("Failed to allocate QuotationObject");
586    }
587
588    unsafe {
589        (*ptr).header = HeapObject {
590            tag: HeapTag::Quotation as u8,
591            flags: 0,
592            reserved: 0,
593            refcount: 1,
594        };
595        (*ptr).wrapper = wrapper;
596        (*ptr).impl_ptr = impl_ptr;
597    }
598
599    ptr as *mut HeapObject
600}
601
/// Free a heap object based on its type tag
///
/// The deallocation layout is chosen from the tag so it matches the layout
/// the corresponding `seq_alloc_*` function allocated with — `dealloc`
/// requires the exact original layout. Null pointers are ignored.
///
/// # Safety
/// `obj` must be a valid pointer to a HeapObject that was allocated by seq_alloc_*.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn seq_free_heap_object(obj: *mut HeapObject) {
    if obj.is_null() {
        return;
    }

    unsafe {
        let tag = (*obj).tag;
        match tag {
            t if t == HeapTag::Float as u8 => {
                let layout = Layout::new::<FloatObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Bool as u8 => {
                let layout = Layout::new::<BoolObject>();
                dealloc(obj as *mut u8, layout);
            }
            t if t == HeapTag::Quotation as u8 => {
                let layout = Layout::new::<QuotationObject>();
                dealloc(obj as *mut u8, layout);
            }
            // TODO: Add other types as needed
            _ => {
                // Unknown type, use minimum HeapObject size.
                // NOTE(review): this is only sound if the object really was
                // allocated with the bare-header layout; for a larger payload
                // (String/Variant/Map/Closure) this mismatches the allocation
                // layout — confirm before adding those types.
                let layout = Layout::new::<HeapObject>();
                dealloc(obj as *mut u8, layout);
            }
        }
    }
}
636
637/// Increment the reference count of a heap object
638///
639/// # Safety
640/// `obj` must be a valid pointer to a HeapObject.
641#[unsafe(no_mangle)]
642pub unsafe extern "C" fn seq_heap_incref(obj: *mut HeapObject) {
643    if obj.is_null() {
644        return;
645    }
646    unsafe {
647        if !(*obj).is_static() {
648            let rc = &(*obj).refcount as *const u32 as *mut u32;
649            std::sync::atomic::AtomicU32::from_ptr(rc)
650                .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
651        }
652    }
653}
654
655/// Decrement the reference count of a heap object, freeing if zero
656///
657/// # Safety
658/// `obj` must be a valid pointer to a HeapObject.
659#[unsafe(no_mangle)]
660pub unsafe extern "C" fn seq_heap_decref(obj: *mut HeapObject) {
661    if obj.is_null() {
662        return;
663    }
664    unsafe {
665        if !(*obj).is_static() {
666            let rc = &(*obj).refcount as *const u32 as *mut u32;
667            let old = std::sync::atomic::AtomicU32::from_ptr(rc)
668                .fetch_sub(1, std::sync::atomic::Ordering::AcqRel);
669            if old == 1 {
670                seq_free_heap_object(obj);
671            }
672        }
673    }
674}
675
676/// Get the float value from a FloatObject
677///
678/// # Safety
679/// `obj` must be a valid pointer to a FloatObject.
680#[unsafe(no_mangle)]
681pub unsafe extern "C" fn seq_get_float(obj: *mut HeapObject) -> f64 {
682    assert!(!obj.is_null(), "seq_get_float: null object");
683    unsafe {
684        assert!(
685            (*obj).tag == HeapTag::Float as u8,
686            "seq_get_float: not a float"
687        );
688        (*(obj as *mut FloatObject)).value
689    }
690}
691
692/// Get the bool value from a BoolObject
693///
694/// # Safety
695/// `obj` must be a valid pointer to a BoolObject.
696#[unsafe(no_mangle)]
697pub unsafe extern "C" fn seq_get_bool(obj: *mut HeapObject) -> bool {
698    assert!(!obj.is_null(), "seq_get_bool: null object");
699    unsafe {
700        assert!(
701            (*obj).tag == HeapTag::Bool as u8,
702            "seq_get_bool: not a bool"
703        );
704        (*(obj as *mut BoolObject)).value
705    }
706}
707
708// =============================================================================
709// Tests
710// =============================================================================
711
#[cfg(test)]
mod tests {
    //! Unit tests covering the legacy tagged-word encoding, the 40-byte
    //! StackValue stack, the FFI surface, and heap-object refcounting.
    use super::*;

    #[test]
    fn test_tag_untag_int() {
        // Positive integers
        assert_eq!(untag_int(tag_int(0)), 0);
        assert_eq!(untag_int(tag_int(1)), 1);
        assert_eq!(untag_int(tag_int(42)), 42);
        assert_eq!(untag_int(tag_int(1000000)), 1000000);

        // Negative integers
        assert_eq!(untag_int(tag_int(-1)), -1);
        assert_eq!(untag_int(tag_int(-42)), -42);
        assert_eq!(untag_int(tag_int(-1000000)), -1000000);

        // Edge cases (63-bit range) — the extremes of what one tag bit leaves
        assert_eq!(untag_int(tag_int(max_63bit)), max_63bit);
        assert_eq!(untag_int(tag_int(min_63bit)), min_63bit);
    }

    #[test]
    fn test_is_tagged() {
        // Integers have low bit set
        assert!(is_tagged_int(tag_int(0)));
        assert!(is_tagged_int(tag_int(42)));
        assert!(is_tagged_int(tag_int(-1)));

        // Pointers have low bit clear (use a fake aligned address)
        let fake_ptr = 0x1000u64; // 8-byte aligned
        assert!(is_tagged_heap(fake_ptr));
        assert!(!is_tagged_int(fake_ptr));
    }

    #[test]
    fn test_tagged_int_examples() {
        // From design doc: Integer `42` → `0x55` (42 << 1 | 1 = 85)
        assert_eq!(tag_int(42), 85);
        assert_eq!(tag_int(42), 0x55);

        // Integer `-1` → `0xFFFFFFFFFFFFFFFF` (-1 << 1 | 1)
        assert_eq!(tag_int(-1), 0xFFFFFFFFFFFFFFFFu64);
    }

    #[test]
    fn test_stack_basic_operations() {
        let mut stack = TaggedStack::new(16);

        assert!(stack.is_empty());
        assert_eq!(stack.depth(), 0);

        // Push integers
        stack.push_int(10);
        stack.push_int(20);
        stack.push_int(30);

        assert!(!stack.is_empty());
        assert_eq!(stack.depth(), 3);

        // Pop and verify (LIFO order)
        assert_eq!(stack.pop_int(), 30);
        assert_eq!(stack.pop_int(), 20);
        assert_eq!(stack.pop_int(), 10);

        assert!(stack.is_empty());
    }

    #[test]
    fn test_stack_peek() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(42);

        // With Value layout: slot0 = discriminant (0 for Int), slot1 = value
        let peeked = stack.peek();
        assert_eq!(peeked.slot0, 0); // Int discriminant
        assert_eq!(peeked.slot1 as i64, 42); // Value in slot1
        assert_eq!(stack.depth(), 1); // Still there

        assert_eq!(stack.pop_int(), 42);
        assert!(stack.is_empty());
    }

    #[test]
    fn test_stack_grow() {
        let mut stack = TaggedStack::new(4);

        // Fill beyond initial capacity to force reallocation
        for i in 0..100 {
            stack.push_int(i);
        }

        assert_eq!(stack.depth(), 100);
        assert!(stack.capacity >= 100);

        // Verify all values survived the realloc(s)
        for i in (0..100).rev() {
            assert_eq!(stack.pop_int(), i);
        }
    }

    #[test]
    fn test_stack_clone() {
        let mut stack = TaggedStack::new(16);
        stack.push_int(1);
        stack.push_int(2);
        stack.push_int(3);

        let mut cloned = stack.clone_stack();

        // Both should have same values
        assert_eq!(cloned.pop_int(), 3);
        assert_eq!(cloned.pop_int(), 2);
        assert_eq!(cloned.pop_int(), 1);

        // Original should be unchanged (deep copy, not a view)
        assert_eq!(stack.pop_int(), 3);
        assert_eq!(stack.pop_int(), 2);
        assert_eq!(stack.pop_int(), 1);
    }

    #[test]
    fn test_ffi_stack_new_free() {
        let stack = seq_stack_new(64);
        assert!(!stack.is_null());

        unsafe {
            assert_eq!(seq_stack_capacity(stack), 64);
            assert_eq!(seq_stack_sp(stack), 0);

            seq_stack_free(stack);
        }
    }

    #[test]
    fn test_float_object() {
        let obj = seq_alloc_float(2.5);
        assert!(!obj.is_null());

        unsafe {
            assert_eq!((*obj).tag, HeapTag::Float as u8);
            assert_eq!((*obj).refcount, 1);
            assert_eq!(seq_get_float(obj), 2.5);

            // Verify it's 8-byte aligned (required by tag_heap_ptr)
            assert!((obj as usize) & 0x7 == 0);

            seq_free_heap_object(obj);
        }
    }

    #[test]
    fn test_bool_object() {
        let obj_true = seq_alloc_bool(true);
        let obj_false = seq_alloc_bool(false);

        unsafe {
            assert!(seq_get_bool(obj_true));
            assert!(!seq_get_bool(obj_false));

            seq_free_heap_object(obj_true);
            seq_free_heap_object(obj_false);
        }
    }

    #[test]
    fn test_refcount() {
        let obj = seq_alloc_float(1.0);

        unsafe {
            assert_eq!((*obj).refcount, 1);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_incref(obj);
            assert_eq!((*obj).refcount, 3);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 2);

            seq_heap_decref(obj);
            assert_eq!((*obj).refcount, 1);

            // Final decref should free
            seq_heap_decref(obj);
            // Can't check after free, but shouldn't crash
        }
    }

    #[test]
    fn test_stack_with_heap_objects() {
        let mut stack = TaggedStack::new(16);

        // Push an integer
        stack.push_int(42);

        // Push a float (heap object); the tagged pointer goes in slot0
        let float_obj = seq_alloc_float(2.5);
        stack.push(StackValue {
            slot0: tag_heap_ptr(float_obj),
            slot1: 0,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        });

        // Push another integer
        stack.push_int(100);

        assert_eq!(stack.depth(), 3);

        // Pop and verify
        assert_eq!(stack.pop_int(), 100);

        let val = stack.pop();
        assert!(is_tagged_heap(val.slot0));
        unsafe {
            assert_eq!(seq_get_float(untag_heap_ptr(val.slot0)), 2.5);
        }

        assert_eq!(stack.pop_int(), 42);

        // Note: float_obj refcount was 1, we popped it without decref,
        // so it's still alive. In real code, drop would decref.
        unsafe {
            seq_free_heap_object(float_obj);
        }
    }

    #[test]
    fn test_stack_value_size() {
        // Verify StackValue is 40 bytes, matching LLVM's %Value type
        // (5 x i64 = 5 x 8 = 40 bytes, compatible with Rust's Value with #[repr(C)])
        assert_eq!(std::mem::size_of::<StackValue>(), 40);
        assert_eq!(STACK_VALUE_SIZE, 40);
    }
}
951}