//! Stack operations for the concatenative runtime.
//!
//! Provides `push` / `pop` / `peek`, the shuffle FFI ops
//! (`dup` / `swap` / `rot` / `over` / `nip` / `tuck` / `2dup` / `pick` / `roll`),
//! coroutine-local stack-base tracking for nested strands, stack allocation
//! helpers, and the REPL stack-dump operation.
//!
//! The tagged-value encoding itself (`StackValue`, tag constants,
//! `tag_int` / `untag_int`, `TaggedStack` backing storage) lives in
//! `tagged_stack`.
//!
//! The `Stack` type is a pointer to the "current position" — where the next
//! push goes. Push stores at `*sp` and returns `sp + 1`; pop returns
//! `sp - 1` and reads from `*(sp - 1)`.
15
16use crate::son::{SonConfig, value_to_son};
17use crate::tagged_stack::{
18    DEFAULT_STACK_CAPACITY, StackValue, TAG_FALSE, TAG_TRUE, TaggedStack, is_tagged_int, tag_int,
19    untag_int,
20};
21use crate::value::Value;
22use std::cell::Cell;
23use std::sync::Arc;
24
/// Stack: A pointer to the current position in a contiguous array of u64.
/// Points one past the topmost live value — the slot the next push writes.
pub type Stack = *mut StackValue;
27
28#[inline]
29pub fn stack_value_size() -> usize {
30    std::mem::size_of::<StackValue>()
31}
32
/// Discriminant constants — retained for API compatibility with codegen and
/// runtime code that switches on type. In tagged-ptr mode, these values are
/// NOT stored in the StackValue itself (the tag is in the pointer bits).
/// They are used only when the runtime unpacks a Value (via pop()) and needs
/// to identify its type. Phase 2 codegen will use bit-level tag checks instead
/// of loading these discriminants from memory.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;
50
51/// Convert a Value to a tagged StackValue
52#[inline]
53pub fn value_to_stack_value(value: Value) -> StackValue {
54    match value {
55        Value::Int(i) => tag_int(i),
56        Value::Bool(false) => TAG_FALSE,
57        Value::Bool(true) => TAG_TRUE,
58        other => {
59            // Heap-allocate via Arc for O(1) clone (refcount bump)
60            Arc::into_raw(Arc::new(other)) as u64
61        }
62    }
63}
64
65/// Convert a tagged StackValue back to a Value (takes ownership)
66///
67/// # Safety
68/// The StackValue must contain valid data — either a tagged int, bool,
69/// or a valid heap pointer from Arc::into_raw.
70#[inline]
71pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
72    if is_tagged_int(sv) {
73        Value::Int(untag_int(sv))
74    } else if sv == TAG_FALSE {
75        Value::Bool(false)
76    } else if sv == TAG_TRUE {
77        Value::Bool(true)
78    } else {
79        // Heap pointer — take ownership of the Arc<Value>
80        let arc = unsafe { Arc::from_raw(sv as *const Value) };
81        // Try to unwrap without cloning if we're the sole owner.
82        // Clone fallback happens when the value was dup'd on the stack
83        // (multiple Arc references exist and haven't been dropped yet).
84        Arc::try_unwrap(arc).unwrap_or_else(|arc| (*arc).clone())
85    }
86}
87
88/// Clone a StackValue from LLVM IR.
89///
90/// # Safety
91/// src and dst must be valid pointers to StackValue slots.
92#[unsafe(no_mangle)]
93pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
94    unsafe {
95        let sv = *src;
96        let cloned = clone_stack_value(sv);
97        *dst = cloned;
98    }
99}
100
101/// Clone a tagged StackValue, handling heap types.
102///
103/// - Int, Bool: bitwise copy (no allocation)
104/// - Heap types: clone the Value and re-box
105///
106/// # Safety
107/// The StackValue must contain valid tagged data.
108#[inline]
109pub unsafe fn clone_stack_value(sv: StackValue) -> StackValue {
110    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
111        // Int or Bool — just copy
112        sv
113    } else {
114        // Heap pointer — increment Arc refcount (O(1), no allocation)
115        unsafe {
116            let arc = Arc::from_raw(sv as *const Value);
117            let cloned = Arc::clone(&arc);
118            std::mem::forget(arc); // Don't decrement the original
119            Arc::into_raw(cloned) as u64
120        }
121    }
122}
123
124/// Drop a tagged StackValue, freeing heap types.
125///
126/// # Safety
127/// The StackValue must be valid and not previously dropped.
128#[inline]
129pub unsafe fn drop_stack_value(sv: StackValue) {
130    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
131        // Int or Bool — nothing to do
132        return;
133    }
134    // Heap pointer — decrement Arc refcount, free if last reference
135    unsafe {
136        let _ = Arc::from_raw(sv as *const Value);
137    }
138}
139
// ============================================================================
// Core Stack Operations
// ============================================================================
143
144/// Push a value onto the stack.
145///
146/// # Safety
147/// Stack pointer must be valid and have room for the value.
148#[inline]
149pub unsafe fn push(stack: Stack, value: Value) -> Stack {
150    unsafe {
151        let sv = value_to_stack_value(value);
152        *stack = sv;
153        stack.add(1)
154    }
155}
156
157/// Push a StackValue directly onto the stack.
158///
159/// # Safety
160/// Stack pointer must be valid and have room for the value.
161#[inline]
162pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
163    unsafe {
164        *stack = sv;
165        stack.add(1)
166    }
167}
168
169/// Pop a value from the stack.
170///
171/// # Safety
172/// Stack must have at least one value.
173#[inline]
174pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
175    unsafe {
176        let new_sp = stack.sub(1);
177        let sv = *new_sp;
178        (new_sp, stack_value_to_value(sv))
179    }
180}
181
182/// Pop a StackValue directly from the stack.
183///
184/// # Safety
185/// Stack must have at least one value.
186#[inline]
187pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
188    unsafe {
189        let new_sp = stack.sub(1);
190        let sv = *new_sp;
191        (new_sp, sv)
192    }
193}
194
195/// Pop two values from the stack.
196///
197/// # Safety
198/// Stack must have at least two values.
199#[inline]
200pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
201    unsafe {
202        let (sp, b) = pop(stack);
203        let (sp, a) = pop(sp);
204        (sp, a, b)
205    }
206}
207
208/// Peek at the top value without removing it.
209///
210/// # Safety
211/// Stack must have at least one value.
212#[inline]
213pub unsafe fn peek(stack: Stack) -> Value {
214    unsafe {
215        let sv = *stack.sub(1);
216        let cloned = clone_stack_value(sv);
217        stack_value_to_value(cloned)
218    }
219}
220
221/// Peek at the raw StackValue without removing it.
222///
223/// # Safety
224/// Stack must have at least one value.
225#[inline]
226pub unsafe fn peek_sv(stack: Stack) -> StackValue {
227    unsafe { *stack.sub(1) }
228}
229
/// Get a mutable reference to a heap Value at the given stack position
/// without popping (no Arc alloc/dealloc cycle).
///
/// Returns `Some(&mut Value)` if the slot is a sole-owned heap value.
/// Returns `None` if the slot is inline (Int/Bool) or shared (refcount > 1).
///
/// Sole ownership is verified via `Arc::get_mut`, which atomically checks
/// both strong and weak refcounts — the same guard used throughout the
/// codebase for COW mutations.
///
/// The caller MUST NOT move or replace the Value behind the reference —
/// it is still owned by the Arc on the stack. Mutating fields in place
/// (e.g., Vec::push on VariantData.fields) is the intended use.
///
/// # Safety
/// - `slot` must point to a valid StackValue within the stack.
/// - The stack must not be concurrently accessed (true for strand-local stacks).
/// - The returned reference is bounded by lifetime `'a`; the caller must
///   ensure it does not outlive the stack slot.
///
/// # Tagged-value encoding
/// The inline-value guard covers all non-heap encodings exhaustively:
/// Int (odd bits), Bool false (0x0), Bool true (0x2). Every other value
/// (even > 2) is a valid `Arc<Value>` heap pointer.
#[inline]
pub unsafe fn heap_value_mut<'a>(slot: *mut StackValue) -> Option<&'a mut Value> {
    unsafe {
        let sv = *slot;
        // All non-heap encodings: Int (odd), Bool false (0x0), Bool true (0x2)
        if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
            return None;
        }
        // Reconstruct Arc, check sole ownership via Arc::get_mut (atomic check
        // of both strong and weak refcounts), then forget to leave it on the stack.
        // The raw-pointer round-trip detaches the borrow from the local `arc`
        // so the reference can carry the caller-chosen lifetime `'a`.
        let mut arc = Arc::from_raw(sv as *const Value);
        let val_ref = Arc::get_mut(&mut arc).map(|v| &mut *(v as *mut Value));
        std::mem::forget(arc); // Don't decrement — Arc stays on the stack
        val_ref
    }
}
270
271/// Convenience: get a mutable reference to the heap Value at stack top (sp - 1).
272///
273/// # Safety
274/// Stack must have at least one value. See `heap_value_mut` for lifetime rules.
275#[inline]
276pub unsafe fn peek_heap_mut<'a>(stack: Stack) -> Option<&'a mut Value> {
277    unsafe { heap_value_mut(stack.sub(1)) }
278}
279
280/// Convenience: get a mutable reference to the heap Value at sp - 2
281/// (second from top).
282///
283/// # Safety
284/// Stack must have at least two values. See `heap_value_mut` for lifetime rules.
285#[inline]
286pub unsafe fn peek_heap_mut_second<'a>(stack: Stack) -> Option<&'a mut Value> {
287    unsafe { heap_value_mut(stack.sub(2)) }
288}
289
// ============================================================================
// FFI Stack Operations
// ============================================================================
293
294/// Duplicate the top value: ( a -- a a )
295///
296/// # Safety
297/// Stack must have at least one value.
298#[unsafe(no_mangle)]
299pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
300    unsafe {
301        let sv = peek_sv(stack);
302        let cloned = clone_stack_value(sv);
303        push_sv(stack, cloned)
304    }
305}
306
/// Pop the top value and drop it (decrement Arc refcount for heap types).
///
/// Helper shared by `patch_seq_drop_op` and any Rust-side caller that needs
/// to discard the top of the stack without materializing a `Value`.
/// NOTE(review): previous doc called this "private", but the fn is `pub` —
/// external callers may depend on it; do not reduce visibility.
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}
323
324/// # Safety
325/// Stack must have at least one value.
326#[unsafe(no_mangle)]
327pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
328    unsafe { drop_top(stack) }
329}
330
331/// # Safety
332/// Stack pointer must be valid and have room for the value.
333#[allow(improper_ctypes_definitions)]
334#[unsafe(no_mangle)]
335pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
336    unsafe { push(stack, value) }
337}
338
339/// Swap the top two values: ( a b -- b a )
340///
341/// # Safety
342/// Stack must have at least two values.
343#[unsafe(no_mangle)]
344pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
345    unsafe {
346        let ptr_b = stack.sub(1);
347        let ptr_a = stack.sub(2);
348        let a = *ptr_a;
349        let b = *ptr_b;
350        *ptr_a = b;
351        *ptr_b = a;
352        stack
353    }
354}
355
356/// Copy the second value to the top: ( a b -- a b a )
357///
358/// # Safety
359/// Stack must have at least two values.
360#[unsafe(no_mangle)]
361pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
362    unsafe {
363        let sv_a = *stack.sub(2);
364        let cloned = clone_stack_value(sv_a);
365        push_sv(stack, cloned)
366    }
367}
368
369/// Rotate the top three values: ( a b c -- b c a )
370///
371/// # Safety
372/// Stack must have at least three values.
373#[unsafe(no_mangle)]
374pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
375    unsafe {
376        let ptr_c = stack.sub(1);
377        let ptr_b = stack.sub(2);
378        let ptr_a = stack.sub(3);
379        let a = *ptr_a;
380        let b = *ptr_b;
381        let c = *ptr_c;
382        *ptr_a = b;
383        *ptr_b = c;
384        *ptr_c = a;
385        stack
386    }
387}
388
389/// Remove the second value: ( a b -- b )
390///
391/// # Safety
392/// Stack must have at least two values.
393#[unsafe(no_mangle)]
394pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
395    unsafe {
396        let ptr_b = stack.sub(1);
397        let ptr_a = stack.sub(2);
398        let a = *ptr_a;
399        let b = *ptr_b;
400        drop_stack_value(a);
401        *ptr_a = b;
402        stack.sub(1)
403    }
404}
405
406/// Copy top value below second: ( a b -- b a b )
407///
408/// # Safety
409/// Stack must have at least two values.
410#[unsafe(no_mangle)]
411pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
412    unsafe {
413        let ptr_b = stack.sub(1);
414        let ptr_a = stack.sub(2);
415        let a = *ptr_a;
416        let b = *ptr_b;
417        let b_clone = clone_stack_value(b);
418        *ptr_a = b;
419        *ptr_b = a;
420        push_sv(stack, b_clone)
421    }
422}
423
424/// Duplicate top two values: ( a b -- a b a b )
425///
426/// # Safety
427/// Stack must have at least two values.
428#[unsafe(no_mangle)]
429pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
430    unsafe {
431        let sv_a = *stack.sub(2);
432        let sv_b = *stack.sub(1);
433        let a_clone = clone_stack_value(sv_a);
434        let b_clone = clone_stack_value(sv_b);
435        let sp = push_sv(stack, a_clone);
436        push_sv(sp, b_clone)
437    }
438}
439
440/// Pop and type-check the Int index for pick/roll-style ops.
441///
442/// On success returns `(sp_after_pop, index)`. On failure sets a runtime
443/// error and returns `Err(sp_after_pop)` — callers should propagate that
444/// pointer unchanged so the stack slot stays consumed.
445///
446/// # Safety
447/// Stack must have at least one value.
448#[inline]
449unsafe fn pop_and_validate_index(stack: Stack, op_name: &str) -> Result<(Stack, usize), Stack> {
450    unsafe {
451        let (sp, n_val) = pop(stack);
452        let n_raw = match n_val {
453            Value::Int(i) => i,
454            _ => {
455                crate::error::set_runtime_error(format!(
456                    "{}: expected Int index on top of stack",
457                    op_name
458                ));
459                return Err(sp);
460            }
461        };
462        if n_raw < 0 {
463            crate::error::set_runtime_error(format!(
464                "{}: index cannot be negative (got {})",
465                op_name, n_raw
466            ));
467            return Err(sp);
468        }
469        Ok((sp, n_raw as usize))
470    }
471}
472
473/// Verify the stack holds at least `n + 1` values beyond the current base.
474/// Sets a runtime error and returns `false` on underflow.
475#[inline]
476fn check_depth_for_index(sp: Stack, n: usize, op_name: &str) -> bool {
477    let base = get_stack_base();
478    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
479    if n >= depth {
480        crate::error::set_runtime_error(format!(
481            "{}: index {} exceeds stack depth {} (need at least {} values)",
482            op_name,
483            n,
484            depth,
485            n + 1
486        ));
487        return false;
488    }
489    true
490}
491
492/// Pick: Copy the nth value to the top.
493///
494/// # Safety
495/// Stack must have at least n+2 values (n+1 data values plus the index).
496#[unsafe(no_mangle)]
497pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
498    unsafe {
499        let (sp, n) = match pop_and_validate_index(stack, "pick") {
500            Ok(x) => x,
501            Err(sp) => return sp,
502        };
503        if !check_depth_for_index(sp, n, "pick") {
504            return sp;
505        }
506
507        let sv = *sp.sub(n + 1);
508        let cloned = clone_stack_value(sv);
509        push_sv(sp, cloned)
510    }
511}
512
513/// Roll: Rotate n+1 items, bringing the item at depth n to the top.
514///
515/// # Safety
516/// Stack must have at least n+2 values (n+1 data values plus the index).
517#[unsafe(no_mangle)]
518pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
519    unsafe {
520        let (sp, n) = match pop_and_validate_index(stack, "roll") {
521            Ok(x) => x,
522            Err(sp) => return sp,
523        };
524
525        if n == 0 {
526            return sp;
527        }
528        if n == 1 {
529            return patch_seq_swap(sp);
530        }
531        if n == 2 {
532            return patch_seq_rot(sp);
533        }
534
535        if !check_depth_for_index(sp, n, "roll") {
536            return sp;
537        }
538
539        let src_ptr = sp.sub(n + 1);
540        let saved = *src_ptr;
541        std::ptr::copy(src_ptr.add(1), src_ptr, n);
542        *sp.sub(1) = saved;
543
544        sp
545    }
546}
547
// ============================================================================
// Coroutine-Local Stack Base Tracking
// ============================================================================

// Per-coroutine (strand-local) stack base address, stored as a usize inside a
// Cell; 0 means "not set" and reads back as a null pointer from
// `get_stack_base`.
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
553
554/// # Safety
555/// Base pointer must be a valid stack pointer for the current strand.
556#[unsafe(no_mangle)]
557pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
558    STACK_BASE.with(|cell| {
559        cell.set(base as usize);
560    });
561}
562
563/// Read the current strand's stack base, or a null pointer if unset.
564#[inline]
565pub fn get_stack_base() -> Stack {
566    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
567}
568
569/// # Safety
570/// Current stack must have a valid base set via `patch_seq_set_stack_base`.
571#[unsafe(no_mangle)]
572pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
573    unsafe {
574        let (new_sp, _base) = clone_stack_with_base(sp);
575        new_sp
576    }
577}
578
579/// # Safety
580/// Current stack must have a valid base set and sp must point within the stack.
581pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
582    let base = get_stack_base();
583    if base.is_null() {
584        panic!("clone_stack: stack base not set");
585    }
586
587    let depth = unsafe { sp.offset_from(base) as usize };
588
589    if depth == 0 {
590        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
591        let new_base = new_stack.base;
592        std::mem::forget(new_stack);
593        return (new_base, new_base);
594    }
595
596    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
597    let new_stack = TaggedStack::new(capacity);
598    let new_base = new_stack.base;
599    std::mem::forget(new_stack);
600
601    unsafe {
602        for i in 0..depth {
603            let sv = *base.add(i);
604            let cloned = clone_stack_value(sv);
605            *new_base.add(i) = cloned;
606        }
607    }
608
609    unsafe { (new_base.add(depth), new_base) }
610}
611
// ============================================================================
// Stack Allocation Helpers
// ============================================================================
615
616/// Allocate a fresh stack buffer and return its base pointer.
617///
618/// The caller takes ownership of the underlying `TaggedStack` storage via
619/// the raw base pointer — the `TaggedStack` wrapper is intentionally leaked
620/// here so the coroutine-native `Stack` type can be a plain `*mut StackValue`.
621pub fn alloc_stack() -> Stack {
622    let stack = TaggedStack::with_default_capacity();
623    let base = stack.base;
624    std::mem::forget(stack);
625    base
626}
627
628/// Allocate a fresh stack and register it as the current strand's base.
629///
630/// Convenience wrapper for tests: installs the stack base so ops like
631/// `pick` / `roll` / `clone_stack` that depend on `get_stack_base()`
632/// behave correctly in a single-strand test harness.
633pub fn alloc_test_stack() -> Stack {
634    let stack = alloc_stack();
635    unsafe { patch_seq_set_stack_base(stack) };
636    stack
637}
638
639/// Dump all values on the stack (for REPL debugging).
640///
641/// # Safety
642/// Stack base must have been set and sp must be valid.
643#[unsafe(no_mangle)]
644pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
645    let base = get_stack_base();
646    if base.is_null() {
647        eprintln!("[stack.dump: base not set]");
648        return sp;
649    }
650
651    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
652
653    if depth == 0 {
654        println!("»");
655    } else {
656        use std::io::Write;
657        print!("» ");
658        for i in 0..depth {
659            if i > 0 {
660                print!(" ");
661            }
662            unsafe {
663                let sv = *base.add(i);
664                print_stack_value(sv);
665            }
666        }
667        println!();
668        let _ = std::io::stdout().flush();
669
670        // Drop all heap-allocated values
671        for i in 0..depth {
672            unsafe {
673                let sv = *base.add(i);
674                drop_stack_value(sv);
675            }
676        }
677    }
678
679    base
680}
681
682fn print_stack_value(sv: StackValue) {
683    let cloned = unsafe { clone_stack_value(sv) };
684    let value = unsafe { stack_value_to_value(cloned) };
685    let son = value_to_son(&value, &SonConfig::compact());
686    print!("{}", son);
687}
688
// ============================================================================
// Short Aliases for Internal/Test Use
// ============================================================================

// Re-exports that drop the `patch_seq_` FFI prefix for readable Rust-side
// call sites and tests.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
702
/// Allocate a base-registered test stack (see [`alloc_test_stack`]).
#[macro_export]
macro_rules! test_stack {
    () => {{ $crate::stack::alloc_test_stack() }};
}
707
// Unit tests: tagged round-trips for every Value variant, error paths of the
// index-taking ops (`pick` / `roll`), and Arc refcount sanity for `dup`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    // Inline-encoded variants: Int and Bool round-trip without allocation.
    #[test]
    fn test_int_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(42));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Int(42));
        }
    }

    #[test]
    fn test_bool_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Bool(true));
            let stack = push(stack, Value::Bool(false));
            let (stack, val_f) = pop(stack);
            let (_, val_t) = pop(stack);
            assert_eq!(val_f, Value::Bool(false));
            assert_eq!(val_t, Value::Bool(true));
        }
    }

    // Heap-encoded variants: everything below goes through Arc boxing.
    #[test]
    fn test_float_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(std::f64::consts::PI));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Float(std::f64::consts::PI));
        }
    }

    #[test]
    fn test_string_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("hello");
            let stack = push(stack, Value::String(s));
            let (_, val) = pop(stack);
            match val {
                Value::String(s) => assert_eq!(s.as_str(), "hello"),
                other => panic!("Expected String, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_symbol_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("my-sym");
            let stack = push(stack, Value::Symbol(s));
            let (_, val) = pop(stack);
            match val {
                Value::Symbol(s) => assert_eq!(s.as_str(), "my-sym"),
                other => panic!("Expected Symbol, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_variant_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let tag = crate::seqstring::SeqString::from("Foo");
            let data = crate::value::VariantData::new(tag, vec![Value::Int(1), Value::Int(2)]);
            let stack = push(stack, Value::Variant(std::sync::Arc::new(data)));
            let (_, val) = pop(stack);
            match val {
                Value::Variant(v) => {
                    assert_eq!(v.tag.as_str(), "Foo");
                    assert_eq!(v.fields.len(), 2);
                }
                other => panic!("Expected Variant, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_map_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let mut map = std::collections::HashMap::new();
            map.insert(crate::value::MapKey::Int(1), Value::Int(100));
            let stack = push(stack, Value::Map(Box::new(map)));
            let (_, val) = pop(stack);
            match val {
                Value::Map(m) => {
                    assert_eq!(m.len(), 1);
                    assert_eq!(m.get(&crate::value::MapKey::Int(1)), Some(&Value::Int(100)));
                }
                other => panic!("Expected Map, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_quotation_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(
                stack,
                Value::Quotation {
                    wrapper: 0x1000,
                    impl_: 0x2000,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Quotation { wrapper, impl_ } => {
                    assert_eq!(wrapper, 0x1000);
                    assert_eq!(impl_, 0x2000);
                }
                other => panic!("Expected Quotation, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_closure_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let env: std::sync::Arc<[Value]> = std::sync::Arc::from(vec![Value::Int(42)]);
            let stack = push(
                stack,
                Value::Closure {
                    fn_ptr: 0x3000,
                    env,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Closure { fn_ptr, env } => {
                    assert_eq!(fn_ptr, 0x3000);
                    assert_eq!(env.len(), 1);
                }
                other => panic!("Expected Closure, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_channel_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (sender, receiver) = may::sync::mpmc::channel();
            let ch = std::sync::Arc::new(crate::value::ChannelData { sender, receiver });
            let stack = push(stack, Value::Channel(ch));
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::Channel(_)));
        }
    }

    #[test]
    fn test_weavectx_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (ys, yr) = may::sync::mpmc::channel();
            let (rs, rr) = may::sync::mpmc::channel();
            let yield_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: ys,
                receiver: yr,
            });
            let resume_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: rs,
                receiver: rr,
            });
            let stack = push(
                stack,
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                },
            );
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::WeaveCtx { .. }));
        }
    }

    #[test]
    fn test_dup_pop_pop_heap_type() {
        // Verify Arc refcount handling: push a heap value, dup it (refcount 2),
        // then pop both. No double-free or corruption should occur.
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(2.5));
            // dup: clones via Arc refcount bump
            let stack = patch_seq_dup(stack);
            // pop both copies
            let (stack, val1) = pop(stack);
            let (_, val2) = pop(stack);
            assert_eq!(val1, Value::Float(2.5));
            assert_eq!(val2, Value::Float(2.5));
        }
    }
}