// seq_core/stack.rs
//! Tagged Stack Implementation
//!
//! Stack operations using 8-byte tagged values (tagged pointers).
//!
//! Encoding:
//! - Odd (bit 0 = 1): Int — 63-bit signed integer, value = tagged >> 1
//! - 0x0: Bool false
//! - 0x2: Bool true
//! - Even > 2: Heap pointer to Arc<Value>
//!
//! The Stack type is a pointer to the "current position" (where next push goes).
//! - Push: store at *sp, return sp + 1
//! - Pop: return sp - 1, read from *(sp - 1)

use crate::tagged_stack::{StackValue, TAG_FALSE, TAG_TRUE, is_tagged_int, tag_int, untag_int};
use crate::value::Value;
use std::sync::Arc;
18
/// Stack: A pointer to the current position in a contiguous array of u64.
/// Each slot holds one tagged `StackValue` word (see module docs for encoding).
pub type Stack = *mut StackValue;
21
22/// Returns the size of a StackValue in bytes
23#[inline]
24pub fn stack_value_size() -> usize {
25    std::mem::size_of::<StackValue>()
26}
27
/// Discriminant constants — retained for API compatibility with codegen and
/// runtime code that switches on type. In tagged-ptr mode, these values are
/// NOT stored in the StackValue itself (the tag is in the pointer bits).
/// They are used only when the runtime unpacks a Value (via pop()) and needs
/// to identify its type. Phase 2 codegen will use bit-level tag checks instead
/// of loading these discriminants from memory.
// NOTE: the numeric values form the external protocol with generated code —
// do not renumber.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;
45
46/// Convert a Value to a tagged StackValue
47#[inline]
48pub fn value_to_stack_value(value: Value) -> StackValue {
49    match value {
50        Value::Int(i) => tag_int(i),
51        Value::Bool(false) => TAG_FALSE,
52        Value::Bool(true) => TAG_TRUE,
53        other => {
54            // Heap-allocate via Arc for O(1) clone (refcount bump)
55            Arc::into_raw(Arc::new(other)) as u64
56        }
57    }
58}
59
60/// Convert a tagged StackValue back to a Value (takes ownership)
61///
62/// # Safety
63/// The StackValue must contain valid data — either a tagged int, bool,
64/// or a valid heap pointer from Arc::into_raw.
65#[inline]
66pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
67    if is_tagged_int(sv) {
68        Value::Int(untag_int(sv))
69    } else if sv == TAG_FALSE {
70        Value::Bool(false)
71    } else if sv == TAG_TRUE {
72        Value::Bool(true)
73    } else {
74        // Heap pointer — take ownership of the Arc<Value>
75        let arc = unsafe { Arc::from_raw(sv as *const Value) };
76        // Try to unwrap without cloning if we're the sole owner.
77        // Clone fallback happens when the value was dup'd on the stack
78        // (multiple Arc references exist and haven't been dropped yet).
79        Arc::try_unwrap(arc).unwrap_or_else(|arc| (*arc).clone())
80    }
81}
82
83/// Clone a StackValue from LLVM IR.
84///
85/// # Safety
86/// src and dst must be valid pointers to StackValue slots.
87#[unsafe(no_mangle)]
88pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
89    unsafe {
90        let sv = *src;
91        let cloned = clone_stack_value(sv);
92        *dst = cloned;
93    }
94}
95
96/// Clone a tagged StackValue, handling heap types.
97///
98/// - Int, Bool: bitwise copy (no allocation)
99/// - Heap types: clone the Value and re-box
100///
101/// # Safety
102/// The StackValue must contain valid tagged data.
103#[inline]
104pub unsafe fn clone_stack_value(sv: StackValue) -> StackValue {
105    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
106        // Int or Bool — just copy
107        sv
108    } else {
109        // Heap pointer — increment Arc refcount (O(1), no allocation)
110        unsafe {
111            let arc = Arc::from_raw(sv as *const Value);
112            let cloned = Arc::clone(&arc);
113            std::mem::forget(arc); // Don't decrement the original
114            Arc::into_raw(cloned) as u64
115        }
116    }
117}
118
119/// Drop a tagged StackValue, freeing heap types.
120///
121/// # Safety
122/// The StackValue must be valid and not previously dropped.
123#[inline]
124pub unsafe fn drop_stack_value(sv: StackValue) {
125    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
126        // Int or Bool — nothing to do
127        return;
128    }
129    // Heap pointer — decrement Arc refcount, free if last reference
130    unsafe {
131        let _ = Arc::from_raw(sv as *const Value);
132    }
133}
134
135// ============================================================================
136// Core Stack Operations
137// ============================================================================
138
139/// Push a value onto the stack.
140///
141/// # Safety
142/// Stack pointer must be valid and have room for the value.
143#[inline]
144pub unsafe fn push(stack: Stack, value: Value) -> Stack {
145    unsafe {
146        let sv = value_to_stack_value(value);
147        *stack = sv;
148        stack.add(1)
149    }
150}
151
152/// Push a StackValue directly onto the stack.
153///
154/// # Safety
155/// Stack pointer must be valid and have room for the value.
156#[inline]
157pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
158    unsafe {
159        *stack = sv;
160        stack.add(1)
161    }
162}
163
164/// Pop a value from the stack.
165///
166/// # Safety
167/// Stack must have at least one value.
168#[inline]
169pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
170    unsafe {
171        let new_sp = stack.sub(1);
172        let sv = *new_sp;
173        (new_sp, stack_value_to_value(sv))
174    }
175}
176
177/// Pop a StackValue directly from the stack.
178///
179/// # Safety
180/// Stack must have at least one value.
181#[inline]
182pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
183    unsafe {
184        let new_sp = stack.sub(1);
185        let sv = *new_sp;
186        (new_sp, sv)
187    }
188}
189
190/// Pop two values from the stack.
191///
192/// # Safety
193/// Stack must have at least two values.
194#[inline]
195pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
196    unsafe {
197        let (sp, b) = pop(stack);
198        let (sp, a) = pop(sp);
199        (sp, a, b)
200    }
201}
202
203/// Pop three values from the stack.
204///
205/// # Safety
206/// Stack must have at least three values.
207#[inline]
208pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
209    unsafe {
210        let (sp, c) = pop(stack);
211        let (sp, b) = pop(sp);
212        let (sp, a) = pop(sp);
213        (sp, a, b, c)
214    }
215}
216
217/// Peek at the top value without removing it.
218///
219/// # Safety
220/// Stack must have at least one value.
221#[inline]
222pub unsafe fn peek(stack: Stack) -> Value {
223    unsafe {
224        let sv = *stack.sub(1);
225        let cloned = clone_stack_value(sv);
226        stack_value_to_value(cloned)
227    }
228}
229
230/// Peek at the raw StackValue without removing it.
231///
232/// # Safety
233/// Stack must have at least one value.
234#[inline]
235pub unsafe fn peek_sv(stack: Stack) -> StackValue {
236    unsafe { *stack.sub(1) }
237}
238
239// ============================================================================
240// FFI Stack Operations
241// ============================================================================
242
243/// Duplicate the top value: ( a -- a a )
244///
245/// # Safety
246/// Stack must have at least one value.
247#[unsafe(no_mangle)]
248pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
249    unsafe {
250        let sv = peek_sv(stack);
251        let cloned = clone_stack_value(sv);
252        push_sv(stack, cloned)
253    }
254}
255
256/// Drop the top value: ( a -- )
257///
258/// # Safety
259/// Stack must have at least one value.
260#[inline]
261pub unsafe fn drop_top(stack: Stack) -> Stack {
262    unsafe {
263        let (new_sp, sv) = pop_sv(stack);
264        drop_stack_value(sv);
265        new_sp
266    }
267}
268
/// Drop the top value (FFI entry point): ( a -- )
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
275
/// Push a Value onto the stack (FFI entry point).
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
// `Value` is not FFI-safe by declaration; the generated code passes it by
// agreement with this runtime, hence the allow.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
283
284/// Swap the top two values: ( a b -- b a )
285///
286/// # Safety
287/// Stack must have at least two values.
288#[unsafe(no_mangle)]
289pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
290    unsafe {
291        let ptr_b = stack.sub(1);
292        let ptr_a = stack.sub(2);
293        let a = *ptr_a;
294        let b = *ptr_b;
295        *ptr_a = b;
296        *ptr_b = a;
297        stack
298    }
299}
300
301/// Copy the second value to the top: ( a b -- a b a )
302///
303/// # Safety
304/// Stack must have at least two values.
305#[unsafe(no_mangle)]
306pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
307    unsafe {
308        let sv_a = *stack.sub(2);
309        let cloned = clone_stack_value(sv_a);
310        push_sv(stack, cloned)
311    }
312}
313
314/// Rotate the top three values: ( a b c -- b c a )
315///
316/// # Safety
317/// Stack must have at least three values.
318#[unsafe(no_mangle)]
319pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
320    unsafe {
321        let ptr_c = stack.sub(1);
322        let ptr_b = stack.sub(2);
323        let ptr_a = stack.sub(3);
324        let a = *ptr_a;
325        let b = *ptr_b;
326        let c = *ptr_c;
327        *ptr_a = b;
328        *ptr_b = c;
329        *ptr_c = a;
330        stack
331    }
332}
333
334/// Remove the second value: ( a b -- b )
335///
336/// # Safety
337/// Stack must have at least two values.
338#[unsafe(no_mangle)]
339pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
340    unsafe {
341        let ptr_b = stack.sub(1);
342        let ptr_a = stack.sub(2);
343        let a = *ptr_a;
344        let b = *ptr_b;
345        drop_stack_value(a);
346        *ptr_a = b;
347        stack.sub(1)
348    }
349}
350
351/// Copy top value below second: ( a b -- b a b )
352///
353/// # Safety
354/// Stack must have at least two values.
355#[unsafe(no_mangle)]
356pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
357    unsafe {
358        let ptr_b = stack.sub(1);
359        let ptr_a = stack.sub(2);
360        let a = *ptr_a;
361        let b = *ptr_b;
362        let b_clone = clone_stack_value(b);
363        *ptr_a = b;
364        *ptr_b = a;
365        push_sv(stack, b_clone)
366    }
367}
368
369/// Duplicate top two values: ( a b -- a b a b )
370///
371/// # Safety
372/// Stack must have at least two values.
373#[unsafe(no_mangle)]
374pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
375    unsafe {
376        let sv_a = *stack.sub(2);
377        let sv_b = *stack.sub(1);
378        let a_clone = clone_stack_value(sv_a);
379        let b_clone = clone_stack_value(sv_b);
380        let sp = push_sv(stack, a_clone);
381        push_sv(sp, b_clone)
382    }
383}
384
385/// Pick: Copy the nth value to the top.
386///
387/// # Safety
388/// Stack must have at least n+2 values (n+1 data values plus the index).
389#[unsafe(no_mangle)]
390pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
391    unsafe {
392        let (sp, n_val) = pop(stack);
393        let n_raw = match n_val {
394            Value::Int(i) => i,
395            _ => {
396                crate::error::set_runtime_error("pick: expected Int index on top of stack");
397                return sp;
398            }
399        };
400
401        if n_raw < 0 {
402            crate::error::set_runtime_error(format!(
403                "pick: index cannot be negative (got {})",
404                n_raw
405            ));
406            return sp;
407        }
408        let n = n_raw as usize;
409
410        let base = get_stack_base();
411        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
412        if n >= depth {
413            crate::error::set_runtime_error(format!(
414                "pick: index {} exceeds stack depth {} (need at least {} values)",
415                n,
416                depth,
417                n + 1
418            ));
419            return sp;
420        }
421
422        let sv = *sp.sub(n + 1);
423        let cloned = clone_stack_value(sv);
424        push_sv(sp, cloned)
425    }
426}
427
428/// Roll: Rotate n+1 items, bringing the item at depth n to the top.
429///
430/// # Safety
431/// Stack must have at least n+2 values (n+1 data values plus the index).
432#[unsafe(no_mangle)]
433pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
434    unsafe {
435        let (sp, n_val) = pop(stack);
436        let n_raw = match n_val {
437            Value::Int(i) => i,
438            _ => {
439                crate::error::set_runtime_error("roll: expected Int index on top of stack");
440                return sp;
441            }
442        };
443
444        if n_raw < 0 {
445            crate::error::set_runtime_error(format!(
446                "roll: index cannot be negative (got {})",
447                n_raw
448            ));
449            return sp;
450        }
451        let n = n_raw as usize;
452
453        if n == 0 {
454            return sp;
455        }
456        if n == 1 {
457            return patch_seq_swap(sp);
458        }
459        if n == 2 {
460            return patch_seq_rot(sp);
461        }
462
463        let base = get_stack_base();
464        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
465        if n >= depth {
466            crate::error::set_runtime_error(format!(
467                "roll: index {} exceeds stack depth {} (need at least {} values)",
468                n,
469                depth,
470                n + 1
471            ));
472            return sp;
473        }
474
475        let src_ptr = sp.sub(n + 1);
476        let saved = *src_ptr;
477        std::ptr::copy(src_ptr.add(1), src_ptr, n);
478        *sp.sub(1) = saved;
479
480        sp
481    }
482}
483
484/// Clone a stack segment.
485///
486/// # Safety
487/// Both src and dst must be valid stack pointers with sufficient space for count values.
488pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
489    unsafe {
490        for i in 0..count {
491            let sv = *src.sub(count - i);
492            let cloned = clone_stack_value(sv);
493            *dst.add(i) = cloned;
494        }
495    }
496}
497
// ============================================================================
// Coroutine-Local Stack Base Tracking
// ============================================================================

use std::cell::Cell;

// Base pointer of the current strand's stack, stored per-coroutine so each
// strand tracks its own stack independently. 0 means "not set yet" (read back
// as a null pointer by get_stack_base).
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
505
506/// # Safety
507/// Base pointer must be a valid stack pointer for the current strand.
508#[unsafe(no_mangle)]
509pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
510    STACK_BASE.with(|cell| {
511        cell.set(base as usize);
512    });
513}
514
/// Return the stack base recorded for the current strand, or a null pointer
/// if none was set (the coroutine-local Cell defaults to 0).
#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}
519
/// Deep-copy the current strand's stack, returning the new stack pointer.
/// Thin FFI wrapper over `clone_stack_with_base` that discards the new base.
///
/// # Safety
/// Current stack must have a valid base set via `patch_seq_set_stack_base`.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}
529
530/// # Safety
531/// Current stack must have a valid base set and sp must point within the stack.
532pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
533    let base = get_stack_base();
534    if base.is_null() {
535        panic!("clone_stack: stack base not set");
536    }
537
538    let depth = unsafe { sp.offset_from(base) as usize };
539
540    if depth == 0 {
541        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
542        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
543        let new_base = new_stack.base;
544        std::mem::forget(new_stack);
545        return (new_base, new_base);
546    }
547
548    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
549    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
550    let new_stack = TaggedStack::new(capacity);
551    let new_base = new_stack.base;
552    std::mem::forget(new_stack);
553
554    unsafe {
555        for i in 0..depth {
556            let sv = *base.add(i);
557            let cloned = clone_stack_value(sv);
558            *new_base.add(i) = cloned;
559        }
560    }
561
562    unsafe { (new_base.add(depth), new_base) }
563}
564
565// ============================================================================
566// Stack Allocation Helpers
567// ============================================================================
568
569pub fn alloc_stack() -> Stack {
570    use crate::tagged_stack::TaggedStack;
571    let stack = TaggedStack::with_default_capacity();
572    let base = stack.base;
573    std::mem::forget(stack);
574    base
575}
576
/// Allocate a stack and register it as the current strand's base —
/// convenience for tests and other single-strand callers.
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}
582
/// Dump all values on the stack (for REPL debugging).
///
/// Prints every value from bottom to top on one line, then releases all
/// heap-allocated values and resets the stack pointer to its base
/// (i.e. the stack is emptied by this call).
///
/// # Safety
/// Stack base must have been set and sp must be valid.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    // Number of live slots between base and sp.
    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("»");
    } else {
        use std::io::Write;
        print!("» ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(sv);
            }
        }
        println!();
        let _ = std::io::stdout().flush();

        // Drop all heap-allocated values — the `return base` below empties
        // the stack, so nothing else will release them.
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}
625
626fn print_stack_value(sv: StackValue) {
627    use crate::son::{SonConfig, value_to_son};
628
629    let cloned = unsafe { clone_stack_value(sv) };
630    let value = unsafe { stack_value_to_value(cloned) };
631    let son = value_to_son(&value, &SonConfig::compact());
632    print!("{}", son);
633}
634
// ============================================================================
// Short Aliases for Internal/Test Use
// ============================================================================

// Re-export the FFI entry points under their stack-language names so
// runtime-internal code and tests can call them without the prefix.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
648
/// Allocate a fresh test stack with its base registered for the current
/// strand (expands to `alloc_test_stack()`).
#[macro_export]
macro_rules! test_stack {
    () => {{ $crate::stack::alloc_test_stack() }};
}
653
// Unit tests: tagged encode/decode round-trips for every Value variant,
// error paths for pick/roll, and Arc refcount handling for heap values.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    // Inline-encoded variants (Int/Bool) round-trip without heap allocation.
    #[test]
    fn test_int_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(42));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Int(42));
        }
    }

    #[test]
    fn test_bool_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Bool(true));
            let stack = push(stack, Value::Bool(false));
            let (stack, val_f) = pop(stack);
            let (_, val_t) = pop(stack);
            assert_eq!(val_f, Value::Bool(false));
            assert_eq!(val_t, Value::Bool(true));
        }
    }

    // Heap-boxed variants round-trip through Arc<Value>.
    #[test]
    fn test_float_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(std::f64::consts::PI));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Float(std::f64::consts::PI));
        }
    }

    #[test]
    fn test_string_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("hello");
            let stack = push(stack, Value::String(s));
            let (_, val) = pop(stack);
            match val {
                Value::String(s) => assert_eq!(s.as_str(), "hello"),
                other => panic!("Expected String, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_symbol_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("my-sym");
            let stack = push(stack, Value::Symbol(s));
            let (_, val) = pop(stack);
            match val {
                Value::Symbol(s) => assert_eq!(s.as_str(), "my-sym"),
                other => panic!("Expected Symbol, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_variant_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let tag = crate::seqstring::SeqString::from("Foo");
            let data = crate::value::VariantData::new(tag, vec![Value::Int(1), Value::Int(2)]);
            let stack = push(stack, Value::Variant(std::sync::Arc::new(data)));
            let (_, val) = pop(stack);
            match val {
                Value::Variant(v) => {
                    assert_eq!(v.tag.as_str(), "Foo");
                    assert_eq!(v.fields.len(), 2);
                }
                other => panic!("Expected Variant, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_map_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let mut map = std::collections::HashMap::new();
            map.insert(crate::value::MapKey::Int(1), Value::Int(100));
            let stack = push(stack, Value::Map(Box::new(map)));
            let (_, val) = pop(stack);
            match val {
                Value::Map(m) => {
                    assert_eq!(m.len(), 1);
                    assert_eq!(m.get(&crate::value::MapKey::Int(1)), Some(&Value::Int(100)));
                }
                other => panic!("Expected Map, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_quotation_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(
                stack,
                Value::Quotation {
                    wrapper: 0x1000,
                    impl_: 0x2000,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Quotation { wrapper, impl_ } => {
                    assert_eq!(wrapper, 0x1000);
                    assert_eq!(impl_, 0x2000);
                }
                other => panic!("Expected Quotation, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_closure_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let env: std::sync::Arc<[Value]> = std::sync::Arc::from(vec![Value::Int(42)]);
            let stack = push(
                stack,
                Value::Closure {
                    fn_ptr: 0x3000,
                    env,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Closure { fn_ptr, env } => {
                    assert_eq!(fn_ptr, 0x3000);
                    assert_eq!(env.len(), 1);
                }
                other => panic!("Expected Closure, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_channel_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (sender, receiver) = may::sync::mpmc::channel();
            let ch = std::sync::Arc::new(crate::value::ChannelData { sender, receiver });
            let stack = push(stack, Value::Channel(ch));
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::Channel(_)));
        }
    }

    #[test]
    fn test_weavectx_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (ys, yr) = may::sync::mpmc::channel();
            let (rs, rr) = may::sync::mpmc::channel();
            let yield_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: ys,
                receiver: yr,
            });
            let resume_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: rs,
                receiver: rr,
            });
            let stack = push(
                stack,
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                },
            );
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::WeaveCtx { .. }));
        }
    }

    #[test]
    fn test_dup_pop_pop_heap_type() {
        // Verify Arc refcount handling: push a heap value, dup it (refcount 2),
        // then pop both. No double-free or corruption should occur.
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(2.5));
            // dup: clones via Arc refcount bump
            let stack = patch_seq_dup(stack);
            // pop both copies
            let (stack, val1) = pop(stack);
            let (_, val2) = pop(stack);
            assert_eq!(val1, Value::Float(2.5));
            assert_eq!(val2, Value::Float(2.5));
        }
    }
}