Skip to main content

seq_core/
stack.rs

//! Tagged Stack Implementation
//!
//! Stack operations using 8-byte tagged values (tagged pointers).
//!
//! Encoding:
//! - Odd (bit 0 = 1): Int — 63-bit signed integer, value = tagged >> 1
//! - 0x0: Bool false
//! - 0x2: Bool true
//! - Even > 2: Heap pointer to Arc<Value>
//!
//! The Stack type is a pointer to the "current position" (where next push goes).
//! - Push: store at *sp, return sp + 1
//! - Pop: return sp - 1, read from *(sp - 1)
15use crate::tagged_stack::{StackValue, TAG_FALSE, TAG_TRUE, is_tagged_int, tag_int, untag_int};
16use crate::value::Value;
17use std::sync::Arc;
18
/// Stack: A pointer to the current position in a contiguous array of u64.
/// The stack grows upward: `*sp` is the next free slot, `sp - 1` is the top.
pub type Stack = *mut StackValue;
21
22/// Returns the size of a StackValue in bytes
23#[inline]
24pub fn stack_value_size() -> usize {
25    std::mem::size_of::<StackValue>()
26}
27
/// Discriminant constants — retained for API compatibility with codegen and
/// runtime code that switches on type. In tagged-ptr mode, these values are
/// NOT stored in the StackValue itself (the tag is in the pointer bits).
/// They are used only when the runtime unpacks a Value (via pop()) and needs
/// to identify its type. Phase 2 codegen will use bit-level tag checks instead
/// of loading these discriminants from memory.
// NOTE(review): the numeric order presumably mirrors the Value enum's variant
// order — confirm against crate::value before reordering.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;
45
46/// Convert a Value to a tagged StackValue
47#[inline]
48pub fn value_to_stack_value(value: Value) -> StackValue {
49    match value {
50        Value::Int(i) => tag_int(i),
51        Value::Bool(false) => TAG_FALSE,
52        Value::Bool(true) => TAG_TRUE,
53        other => {
54            // Heap-allocate via Arc for O(1) clone (refcount bump)
55            Arc::into_raw(Arc::new(other)) as u64
56        }
57    }
58}
59
60/// Convert a tagged StackValue back to a Value (takes ownership)
61///
62/// # Safety
63/// The StackValue must contain valid data — either a tagged int, bool,
64/// or a valid heap pointer from Arc::into_raw.
65#[inline]
66pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
67    if is_tagged_int(sv) {
68        Value::Int(untag_int(sv))
69    } else if sv == TAG_FALSE {
70        Value::Bool(false)
71    } else if sv == TAG_TRUE {
72        Value::Bool(true)
73    } else {
74        // Heap pointer — take ownership of the Arc<Value>
75        let arc = unsafe { Arc::from_raw(sv as *const Value) };
76        // Try to unwrap without cloning if we're the sole owner.
77        // Clone fallback happens when the value was dup'd on the stack
78        // (multiple Arc references exist and haven't been dropped yet).
79        Arc::try_unwrap(arc).unwrap_or_else(|arc| (*arc).clone())
80    }
81}
82
83/// Clone a StackValue from LLVM IR.
84///
85/// # Safety
86/// src and dst must be valid pointers to StackValue slots.
87#[unsafe(no_mangle)]
88pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
89    unsafe {
90        let sv = *src;
91        let cloned = clone_stack_value(sv);
92        *dst = cloned;
93    }
94}
95
96/// Clone a tagged StackValue, handling heap types.
97///
98/// - Int, Bool: bitwise copy (no allocation)
99/// - Heap types: clone the Value and re-box
100///
101/// # Safety
102/// The StackValue must contain valid tagged data.
103#[inline]
104pub unsafe fn clone_stack_value(sv: StackValue) -> StackValue {
105    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
106        // Int or Bool — just copy
107        sv
108    } else {
109        // Heap pointer — increment Arc refcount (O(1), no allocation)
110        unsafe {
111            let arc = Arc::from_raw(sv as *const Value);
112            let cloned = Arc::clone(&arc);
113            std::mem::forget(arc); // Don't decrement the original
114            Arc::into_raw(cloned) as u64
115        }
116    }
117}
118
119/// Drop a tagged StackValue, freeing heap types.
120///
121/// # Safety
122/// The StackValue must be valid and not previously dropped.
123#[inline]
124pub unsafe fn drop_stack_value(sv: StackValue) {
125    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
126        // Int or Bool — nothing to do
127        return;
128    }
129    // Heap pointer — decrement Arc refcount, free if last reference
130    unsafe {
131        let _ = Arc::from_raw(sv as *const Value);
132    }
133}
134
135// ============================================================================
136// Core Stack Operations
137// ============================================================================
138
139/// Push a value onto the stack.
140///
141/// # Safety
142/// Stack pointer must be valid and have room for the value.
143#[inline]
144pub unsafe fn push(stack: Stack, value: Value) -> Stack {
145    unsafe {
146        let sv = value_to_stack_value(value);
147        *stack = sv;
148        stack.add(1)
149    }
150}
151
152/// Push a StackValue directly onto the stack.
153///
154/// # Safety
155/// Stack pointer must be valid and have room for the value.
156#[inline]
157pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
158    unsafe {
159        *stack = sv;
160        stack.add(1)
161    }
162}
163
164/// Pop a value from the stack.
165///
166/// # Safety
167/// Stack must have at least one value.
168#[inline]
169pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
170    unsafe {
171        let new_sp = stack.sub(1);
172        let sv = *new_sp;
173        (new_sp, stack_value_to_value(sv))
174    }
175}
176
177/// Pop a StackValue directly from the stack.
178///
179/// # Safety
180/// Stack must have at least one value.
181#[inline]
182pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
183    unsafe {
184        let new_sp = stack.sub(1);
185        let sv = *new_sp;
186        (new_sp, sv)
187    }
188}
189
190/// Pop two values from the stack.
191///
192/// # Safety
193/// Stack must have at least two values.
194#[inline]
195pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
196    unsafe {
197        let (sp, b) = pop(stack);
198        let (sp, a) = pop(sp);
199        (sp, a, b)
200    }
201}
202
203/// Pop three values from the stack.
204///
205/// # Safety
206/// Stack must have at least three values.
207#[inline]
208pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
209    unsafe {
210        let (sp, c) = pop(stack);
211        let (sp, b) = pop(sp);
212        let (sp, a) = pop(sp);
213        (sp, a, b, c)
214    }
215}
216
217/// Peek at the top value without removing it.
218///
219/// # Safety
220/// Stack must have at least one value.
221#[inline]
222pub unsafe fn peek(stack: Stack) -> Value {
223    unsafe {
224        let sv = *stack.sub(1);
225        let cloned = clone_stack_value(sv);
226        stack_value_to_value(cloned)
227    }
228}
229
230/// Peek at the raw StackValue without removing it.
231///
232/// # Safety
233/// Stack must have at least one value.
234#[inline]
235pub unsafe fn peek_sv(stack: Stack) -> StackValue {
236    unsafe { *stack.sub(1) }
237}
238
/// Get a mutable reference to a heap Value at the given stack position
/// without popping (no Arc alloc/dealloc cycle).
///
/// Returns `Some(&mut Value)` if the slot is a sole-owned heap value.
/// Returns `None` if the slot is inline (Int/Bool) or shared (refcount > 1).
///
/// Sole ownership is verified via `Arc::get_mut`, which atomically checks
/// both strong and weak refcounts — the same guard used throughout the
/// codebase for COW mutations.
///
/// The caller MUST NOT move or replace the Value behind the reference —
/// it is still owned by the Arc on the stack. Mutating fields in place
/// (e.g., Vec::push on VariantData.fields) is the intended use.
///
/// # Safety
/// - `slot` must point to a valid StackValue within the stack.
/// - The stack must not be concurrently accessed (true for strand-local stacks).
/// - The returned reference is bounded by lifetime `'a`; the caller must
///   ensure it does not outlive the stack slot.
///
/// # Tagged-value encoding
/// The inline-value guard covers all non-heap encodings exhaustively:
/// Int (odd bits), Bool false (0x0), Bool true (0x2). Every other value
/// (even > 2) is a valid `Arc<Value>` heap pointer.
#[inline]
pub unsafe fn heap_value_mut<'a>(slot: *mut StackValue) -> Option<&'a mut Value> {
    unsafe {
        let sv = *slot;
        // All non-heap encodings: Int (odd), Bool false (0x0), Bool true (0x2)
        if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
            return None;
        }
        // Reconstruct Arc, check sole ownership via Arc::get_mut (atomic check
        // of both strong and weak refcounts), then forget to leave it on the stack.
        let mut arc = Arc::from_raw(sv as *const Value);
        // The raw-pointer round-trip (`v as *mut Value` then re-borrow) detaches
        // the borrow from `arc`'s local lifetime so it can be returned as 'a —
        // sound only because the Arc itself is forgotten (never dropped) below.
        let val_ref = Arc::get_mut(&mut arc).map(|v| &mut *(v as *mut Value));
        std::mem::forget(arc); // Don't decrement — Arc stays on the stack
        val_ref
    }
}
279
280/// Convenience: get a mutable reference to the heap Value at stack top (sp - 1).
281///
282/// # Safety
283/// Stack must have at least one value. See `heap_value_mut` for lifetime rules.
284#[inline]
285pub unsafe fn peek_heap_mut<'a>(stack: Stack) -> Option<&'a mut Value> {
286    unsafe { heap_value_mut(stack.sub(1)) }
287}
288
289/// Convenience: get a mutable reference to the heap Value at sp - 2
290/// (second from top).
291///
292/// # Safety
293/// Stack must have at least two values. See `heap_value_mut` for lifetime rules.
294#[inline]
295pub unsafe fn peek_heap_mut_second<'a>(stack: Stack) -> Option<&'a mut Value> {
296    unsafe { heap_value_mut(stack.sub(2)) }
297}
298
299// ============================================================================
300// FFI Stack Operations
301// ============================================================================
302
303/// Duplicate the top value: ( a -- a a )
304///
305/// # Safety
306/// Stack must have at least one value.
307#[unsafe(no_mangle)]
308pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
309    unsafe {
310        let sv = peek_sv(stack);
311        let cloned = clone_stack_value(sv);
312        push_sv(stack, cloned)
313    }
314}
315
316/// Drop the top value: ( a -- )
317///
318/// # Safety
319/// Stack must have at least one value.
320#[inline]
321pub unsafe fn drop_top(stack: Stack) -> Stack {
322    unsafe {
323        let (new_sp, sv) = pop_sv(stack);
324        drop_stack_value(sv);
325        new_sp
326    }
327}
328
/// FFI entry point for `drop`: ( a -- ). Thin wrapper over `drop_top`.
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
335
/// FFI entry point: push an owned Value onto the stack.
///
/// `Value` is not a C-compatible type, hence the `improper_ctypes_definitions`
/// allow. NOTE(review): this presumes callers pass `Value` with Rust's own
/// layout/ABI (e.g. generated code built against this crate) — confirm.
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
343
344/// Swap the top two values: ( a b -- b a )
345///
346/// # Safety
347/// Stack must have at least two values.
348#[unsafe(no_mangle)]
349pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
350    unsafe {
351        let ptr_b = stack.sub(1);
352        let ptr_a = stack.sub(2);
353        let a = *ptr_a;
354        let b = *ptr_b;
355        *ptr_a = b;
356        *ptr_b = a;
357        stack
358    }
359}
360
361/// Copy the second value to the top: ( a b -- a b a )
362///
363/// # Safety
364/// Stack must have at least two values.
365#[unsafe(no_mangle)]
366pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
367    unsafe {
368        let sv_a = *stack.sub(2);
369        let cloned = clone_stack_value(sv_a);
370        push_sv(stack, cloned)
371    }
372}
373
374/// Rotate the top three values: ( a b c -- b c a )
375///
376/// # Safety
377/// Stack must have at least three values.
378#[unsafe(no_mangle)]
379pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
380    unsafe {
381        let ptr_c = stack.sub(1);
382        let ptr_b = stack.sub(2);
383        let ptr_a = stack.sub(3);
384        let a = *ptr_a;
385        let b = *ptr_b;
386        let c = *ptr_c;
387        *ptr_a = b;
388        *ptr_b = c;
389        *ptr_c = a;
390        stack
391    }
392}
393
394/// Remove the second value: ( a b -- b )
395///
396/// # Safety
397/// Stack must have at least two values.
398#[unsafe(no_mangle)]
399pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
400    unsafe {
401        let ptr_b = stack.sub(1);
402        let ptr_a = stack.sub(2);
403        let a = *ptr_a;
404        let b = *ptr_b;
405        drop_stack_value(a);
406        *ptr_a = b;
407        stack.sub(1)
408    }
409}
410
411/// Copy top value below second: ( a b -- b a b )
412///
413/// # Safety
414/// Stack must have at least two values.
415#[unsafe(no_mangle)]
416pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
417    unsafe {
418        let ptr_b = stack.sub(1);
419        let ptr_a = stack.sub(2);
420        let a = *ptr_a;
421        let b = *ptr_b;
422        let b_clone = clone_stack_value(b);
423        *ptr_a = b;
424        *ptr_b = a;
425        push_sv(stack, b_clone)
426    }
427}
428
429/// Duplicate top two values: ( a b -- a b a b )
430///
431/// # Safety
432/// Stack must have at least two values.
433#[unsafe(no_mangle)]
434pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
435    unsafe {
436        let sv_a = *stack.sub(2);
437        let sv_b = *stack.sub(1);
438        let a_clone = clone_stack_value(sv_a);
439        let b_clone = clone_stack_value(sv_b);
440        let sp = push_sv(stack, a_clone);
441        push_sv(sp, b_clone)
442    }
443}
444
445/// Pick: Copy the nth value to the top.
446///
447/// # Safety
448/// Stack must have at least n+2 values (n+1 data values plus the index).
449#[unsafe(no_mangle)]
450pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
451    unsafe {
452        let (sp, n_val) = pop(stack);
453        let n_raw = match n_val {
454            Value::Int(i) => i,
455            _ => {
456                crate::error::set_runtime_error("pick: expected Int index on top of stack");
457                return sp;
458            }
459        };
460
461        if n_raw < 0 {
462            crate::error::set_runtime_error(format!(
463                "pick: index cannot be negative (got {})",
464                n_raw
465            ));
466            return sp;
467        }
468        let n = n_raw as usize;
469
470        let base = get_stack_base();
471        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
472        if n >= depth {
473            crate::error::set_runtime_error(format!(
474                "pick: index {} exceeds stack depth {} (need at least {} values)",
475                n,
476                depth,
477                n + 1
478            ));
479            return sp;
480        }
481
482        let sv = *sp.sub(n + 1);
483        let cloned = clone_stack_value(sv);
484        push_sv(sp, cloned)
485    }
486}
487
/// Roll: Rotate n+1 items, bringing the item at depth n to the top.
///
/// # Safety
/// Stack must have at least n+2 values (n+1 data values plus the index).
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        // Top of stack holds the roll depth n.
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        // Small cases map onto existing primitives; note these fast paths
        // skip the depth check below (caller guarantees enough values).
        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        // Save the value at depth n, shift the n slots above it down by one
        // (regions overlap, so this must be ptr::copy, i.e. memmove semantics),
        // then place the saved value on top. Ownership moves with the bits —
        // no clone/drop needed.
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;
        std::ptr::copy(src_ptr.add(1), src_ptr, n);
        *sp.sub(1) = saved;

        sp
    }
}
543
544/// Clone a stack segment.
545///
546/// # Safety
547/// Both src and dst must be valid stack pointers with sufficient space for count values.
548pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
549    unsafe {
550        for i in 0..count {
551            let sv = *src.sub(count - i);
552            let cloned = clone_stack_value(sv);
553            *dst.add(i) = cloned;
554        }
555    }
556}
557
// ============================================================================
// Coroutine-Local Stack Base Tracking
// ============================================================================

use std::cell::Cell;

// One stack-base slot per may coroutine (strand), stored as a usize address;
// 0 means "not set" and reads back as a null pointer in get_stack_base().
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
565
566/// # Safety
567/// Base pointer must be a valid stack pointer for the current strand.
568#[unsafe(no_mangle)]
569pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
570    STACK_BASE.with(|cell| {
571        cell.set(base as usize);
572    });
573}
574
575#[inline]
576pub fn get_stack_base() -> Stack {
577    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
578}
579
580/// # Safety
581/// Current stack must have a valid base set via `patch_seq_set_stack_base`.
582#[unsafe(no_mangle)]
583pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
584    unsafe {
585        let (new_sp, _base) = clone_stack_with_base(sp);
586        new_sp
587    }
588}
589
590/// # Safety
591/// Current stack must have a valid base set and sp must point within the stack.
592pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
593    let base = get_stack_base();
594    if base.is_null() {
595        panic!("clone_stack: stack base not set");
596    }
597
598    let depth = unsafe { sp.offset_from(base) as usize };
599
600    if depth == 0 {
601        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
602        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
603        let new_base = new_stack.base;
604        std::mem::forget(new_stack);
605        return (new_base, new_base);
606    }
607
608    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
609    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
610    let new_stack = TaggedStack::new(capacity);
611    let new_base = new_stack.base;
612    std::mem::forget(new_stack);
613
614    unsafe {
615        for i in 0..depth {
616            let sv = *base.add(i);
617            let cloned = clone_stack_value(sv);
618            *new_base.add(i) = cloned;
619        }
620    }
621
622    unsafe { (new_base.add(depth), new_base) }
623}
624
625// ============================================================================
626// Stack Allocation Helpers
627// ============================================================================
628
629pub fn alloc_stack() -> Stack {
630    use crate::tagged_stack::TaggedStack;
631    let stack = TaggedStack::with_default_capacity();
632    let base = stack.base;
633    std::mem::forget(stack);
634    base
635}
636
637pub fn alloc_test_stack() -> Stack {
638    let stack = alloc_stack();
639    unsafe { patch_seq_set_stack_base(stack) };
640    stack
641}
642
643/// Dump all values on the stack (for REPL debugging).
644///
645/// # Safety
646/// Stack base must have been set and sp must be valid.
647#[unsafe(no_mangle)]
648pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
649    let base = get_stack_base();
650    if base.is_null() {
651        eprintln!("[stack.dump: base not set]");
652        return sp;
653    }
654
655    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
656
657    if depth == 0 {
658        println!("»");
659    } else {
660        use std::io::Write;
661        print!("» ");
662        for i in 0..depth {
663            if i > 0 {
664                print!(" ");
665            }
666            unsafe {
667                let sv = *base.add(i);
668                print_stack_value(sv);
669            }
670        }
671        println!();
672        let _ = std::io::stdout().flush();
673
674        // Drop all heap-allocated values
675        for i in 0..depth {
676            unsafe {
677                let sv = *base.add(i);
678                drop_stack_value(sv);
679            }
680        }
681    }
682
683    base
684}
685
686fn print_stack_value(sv: StackValue) {
687    use crate::son::{SonConfig, value_to_son};
688
689    let cloned = unsafe { clone_stack_value(sv) };
690    let value = unsafe { stack_value_to_value(cloned) };
691    let son = value_to_son(&value, &SonConfig::compact());
692    print!("{}", son);
693}
694
// ============================================================================
// Short Aliases for Internal/Test Use
// ============================================================================

// Re-export the FFI entry points under Forth-style names so internal callers
// and tests don't need the `patch_seq_` prefix.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
708
/// Shorthand for tests: allocate a fresh stack with its base registered for
/// the current strand (expands to `alloc_test_stack()`).
#[macro_export]
macro_rules! test_stack {
    () => {{ $crate::stack::alloc_test_stack() }};
}
713
#[cfg(test)]
mod tests {
    use super::*;

    // Error-path tests: pick/roll consume the index and record a runtime
    // error via crate::error instead of panicking.

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    // Round-trip tests: each Value variant must survive push -> pop intact,
    // covering both inline encodings (Int/Bool) and Arc-boxed heap types.

    #[test]
    fn test_int_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(42));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Int(42));
        }
    }

    #[test]
    fn test_bool_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Bool(true));
            let stack = push(stack, Value::Bool(false));
            let (stack, val_f) = pop(stack);
            let (_, val_t) = pop(stack);
            assert_eq!(val_f, Value::Bool(false));
            assert_eq!(val_t, Value::Bool(true));
        }
    }

    #[test]
    fn test_float_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(std::f64::consts::PI));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Float(std::f64::consts::PI));
        }
    }

    #[test]
    fn test_string_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("hello");
            let stack = push(stack, Value::String(s));
            let (_, val) = pop(stack);
            match val {
                Value::String(s) => assert_eq!(s.as_str(), "hello"),
                other => panic!("Expected String, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_symbol_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("my-sym");
            let stack = push(stack, Value::Symbol(s));
            let (_, val) = pop(stack);
            match val {
                Value::Symbol(s) => assert_eq!(s.as_str(), "my-sym"),
                other => panic!("Expected Symbol, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_variant_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let tag = crate::seqstring::SeqString::from("Foo");
            let data = crate::value::VariantData::new(tag, vec![Value::Int(1), Value::Int(2)]);
            let stack = push(stack, Value::Variant(std::sync::Arc::new(data)));
            let (_, val) = pop(stack);
            match val {
                Value::Variant(v) => {
                    assert_eq!(v.tag.as_str(), "Foo");
                    assert_eq!(v.fields.len(), 2);
                }
                other => panic!("Expected Variant, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_map_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let mut map = std::collections::HashMap::new();
            map.insert(crate::value::MapKey::Int(1), Value::Int(100));
            let stack = push(stack, Value::Map(Box::new(map)));
            let (_, val) = pop(stack);
            match val {
                Value::Map(m) => {
                    assert_eq!(m.len(), 1);
                    assert_eq!(m.get(&crate::value::MapKey::Int(1)), Some(&Value::Int(100)));
                }
                other => panic!("Expected Map, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_quotation_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(
                stack,
                Value::Quotation {
                    wrapper: 0x1000,
                    impl_: 0x2000,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Quotation { wrapper, impl_ } => {
                    assert_eq!(wrapper, 0x1000);
                    assert_eq!(impl_, 0x2000);
                }
                other => panic!("Expected Quotation, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_closure_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let env: std::sync::Arc<[Value]> = std::sync::Arc::from(vec![Value::Int(42)]);
            let stack = push(
                stack,
                Value::Closure {
                    fn_ptr: 0x3000,
                    env,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Closure { fn_ptr, env } => {
                    assert_eq!(fn_ptr, 0x3000);
                    assert_eq!(env.len(), 1);
                }
                other => panic!("Expected Closure, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_channel_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (sender, receiver) = may::sync::mpmc::channel();
            let ch = std::sync::Arc::new(crate::value::ChannelData { sender, receiver });
            let stack = push(stack, Value::Channel(ch));
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::Channel(_)));
        }
    }

    #[test]
    fn test_weavectx_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (ys, yr) = may::sync::mpmc::channel();
            let (rs, rr) = may::sync::mpmc::channel();
            let yield_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: ys,
                receiver: yr,
            });
            let resume_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: rs,
                receiver: rr,
            });
            let stack = push(
                stack,
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                },
            );
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::WeaveCtx { .. }));
        }
    }

    #[test]
    fn test_dup_pop_pop_heap_type() {
        // Verify Arc refcount handling: push a heap value, dup it (refcount 2),
        // then pop both. No double-free or corruption should occur.
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(2.5));
            // dup: clones via Arc refcount bump
            let stack = patch_seq_dup(stack);
            // pop both copies
            let (stack, val1) = pop(stack);
            let (_, val2) = pop(stack);
            assert_eq!(val1, Value::Float(2.5));
            assert_eq!(val2, Value::Float(2.5));
        }
    }
}