// seq_runtime/stack.rs

1//! Tagged Stack Implementation
2//!
3//! This module implements the stack using a contiguous array of 40-byte StackValue entries.
4//! Each StackValue has the layout: { slot0: discriminant, slot1-4: payload }
5//!
6//! The Stack type is a pointer to the "current position" (where next push goes).
7//! - Push: store at *sp, return sp + 1
8//! - Pop: return sp - 1, read from *(sp - 1)
9
10use crate::tagged_stack::StackValue;
11use crate::value::Value;
12use std::sync::Arc;
13
/// Stack: A pointer to the current position in a contiguous array of StackValue.
///
/// Points to where the next value would be pushed.
/// sp - 1 points to the top value, sp - 2 to second, etc.
pub type Stack = *mut StackValue;

/// Discriminant values matching codegen.
///
/// These tags are stored in `slot0` of each StackValue; the code generator
/// emits the same numeric constants, so renumbering here without updating
/// codegen would misinterpret every value on the stack.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
31
/// Convert a Value to a StackValue for pushing onto the tagged stack.
///
/// Consumes the `Value`. For heap-backed variants, ownership of the
/// allocation is transferred into the raw-pointer slots (via
/// `into_raw`/`into_raw_parts`), so the resulting StackValue must later be
/// consumed by `stack_value_to_value` or released by `drop_stack_value`,
/// or the allocation leaks.
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        // i64 bit pattern round-trips losslessly through the u64 slot.
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        // f64 stored as its raw bit pattern (see f64::to_bits).
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            // SeqString has: ptr, len, capacity, global
            // Store these in slots 1-4
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            // Ownership of this Arc reference moves into slot1 (no refcount change).
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            // Ownership of the boxed HashMap moves into slot1.
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        // Quotations are a pair of function addresses; nothing heap-owned.
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            // Arc<[Value]> is a fat pointer - use Box to store it
            // (slot2 then holds a thin pointer to the boxed Arc).
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            // Store Arc<ChannelData> as raw pointer (ownership moves into slot1)
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            // Store both Arc<ChannelData> as raw pointers
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}
136
/// Convert a StackValue back to a Value.
///
/// This is the inverse of `value_to_stack_value`: it CONSUMES the
/// StackValue's ownership of any heap data (the raw pointers are turned
/// back into their owning Arc/Box/SeqString). Do not also call
/// `drop_stack_value` on the same StackValue afterwards — that would be a
/// double free.
///
/// # Safety
/// The StackValue must contain valid data for its discriminant
///
/// # Panics
/// Panics if `slot0` is not one of the known DISC_* constants.
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                // Rebuild the SeqString from the raw parts stashed in slots 1-4.
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Reclaims the reference that value_to_stack_value leaked.
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // Unbox the Arc<[Value]> that we boxed in value_to_stack_value
                // (the Box itself is freed here; the Arc moves into the Value).
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::ChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const ChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const ChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}
197
198/// Clone a StackValue from LLVM IR - reads from src pointer, writes to dst pointer.
199/// This is the FFI-callable version for inline codegen that avoids ABI issues
200/// with passing large structs by value.
201///
202/// # Safety
203/// Both src and dst pointers must be valid and properly aligned StackValue pointers.
204#[unsafe(no_mangle)]
205pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
206    unsafe {
207        let sv = &*src;
208        let cloned = clone_stack_value(sv);
209        *dst = cloned;
210    }
211}
212
213/// Clone a StackValue, handling reference counting for heap types.
214///
215/// # Cloning Strategy by Type
216/// - **Int, Float, Bool, Quotation**: Bitwise copy (no heap allocation)
217/// - **String**: Deep copy to a new global (refcounted) string. This is necessary
218///   because the source may be an arena-allocated string that would become invalid
219///   when the arena resets. Global strings are heap-allocated with Arc refcounting.
220/// - **Variant**: Arc refcount increment (O(1), shares underlying data)
221/// - **Map**: Deep clone of the HashMap and all contained values
222/// - **Closure**: Deep clone of the Arc<[Value]> environment
223/// - **Channel**: Arc refcount increment (O(1), shares underlying sender/receiver)
224///
225/// # Safety
226/// The StackValue must contain valid data for its discriminant.
227#[inline]
228pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
229    unsafe {
230        match sv.slot0 {
231            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
232            DISC_STRING => {
233                // Deep copy: arena strings may become invalid, so always create a global string
234                let ptr = sv.slot1 as *const u8;
235                let len = sv.slot2 as usize;
236                debug_assert!(!ptr.is_null(), "String pointer is null");
237                // Read the string content without taking ownership
238                let slice = std::slice::from_raw_parts(ptr, len);
239                // Validate UTF-8 in debug builds, skip in release for performance
240                #[cfg(debug_assertions)]
241                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
242                #[cfg(not(debug_assertions))]
243                let s = std::str::from_utf8_unchecked(slice);
244                // Clone to a new global string
245                let cloned = crate::seqstring::global_string(s.to_string());
246                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
247                StackValue {
248                    slot0: DISC_STRING,
249                    slot1: new_ptr as u64,
250                    slot2: new_len as u64,
251                    slot3: new_cap as u64,
252                    slot4: if new_global { 1 } else { 0 },
253                }
254            }
255            DISC_VARIANT => {
256                use crate::value::VariantData;
257                let ptr = sv.slot1 as *const VariantData;
258                debug_assert!(!ptr.is_null(), "Variant pointer is null");
259                debug_assert!(
260                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
261                    "Variant pointer is misaligned"
262                );
263                let arc = Arc::from_raw(ptr);
264                let cloned = Arc::clone(&arc);
265                std::mem::forget(arc);
266                StackValue {
267                    slot0: DISC_VARIANT,
268                    slot1: Arc::into_raw(cloned) as u64,
269                    slot2: 0,
270                    slot3: 0,
271                    slot4: 0,
272                }
273            }
274            DISC_MAP => {
275                // Deep clone the map
276                use crate::value::MapKey;
277                use std::collections::HashMap;
278                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
279                debug_assert!(!ptr.is_null(), "Map pointer is null");
280                debug_assert!(
281                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
282                    "Map pointer is misaligned"
283                );
284                let boxed = Box::from_raw(ptr);
285                let cloned = boxed.clone();
286                std::mem::forget(boxed);
287                StackValue {
288                    slot0: DISC_MAP,
289                    slot1: Box::into_raw(cloned) as u64,
290                    slot2: 0,
291                    slot3: 0,
292                    slot4: 0,
293                }
294            }
295            DISC_CLOSURE => {
296                // The env is stored as Box<Arc<[Value]>>
297                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
298                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
299                debug_assert!(
300                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
301                    "Closure env pointer is misaligned"
302                );
303                let env_arc = &*env_box_ptr;
304                let cloned_env = Arc::clone(env_arc);
305                // Box the cloned Arc
306                let new_env_box = Box::new(cloned_env);
307                StackValue {
308                    slot0: DISC_CLOSURE,
309                    slot1: sv.slot1,
310                    slot2: Box::into_raw(new_env_box) as u64,
311                    slot3: 0,
312                    slot4: 0,
313                }
314            }
315            DISC_CHANNEL => {
316                // Arc refcount increment - O(1) clone
317                use crate::value::ChannelData;
318                let ptr = sv.slot1 as *const ChannelData;
319                debug_assert!(!ptr.is_null(), "Channel pointer is null");
320                let arc = Arc::from_raw(ptr);
321                let cloned = Arc::clone(&arc);
322                std::mem::forget(arc);
323                StackValue {
324                    slot0: DISC_CHANNEL,
325                    slot1: Arc::into_raw(cloned) as u64,
326                    slot2: 0,
327                    slot3: 0,
328                    slot4: 0,
329                }
330            }
331            DISC_WEAVECTX => {
332                // Arc refcount increment for both channels - O(1) clone
333                use crate::value::ChannelData;
334                let yield_ptr = sv.slot1 as *const ChannelData;
335                let resume_ptr = sv.slot2 as *const ChannelData;
336                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
337                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
338                let yield_arc = Arc::from_raw(yield_ptr);
339                let resume_arc = Arc::from_raw(resume_ptr);
340                let yield_cloned = Arc::clone(&yield_arc);
341                let resume_cloned = Arc::clone(&resume_arc);
342                std::mem::forget(yield_arc);
343                std::mem::forget(resume_arc);
344                StackValue {
345                    slot0: DISC_WEAVECTX,
346                    slot1: Arc::into_raw(yield_cloned) as u64,
347                    slot2: Arc::into_raw(resume_cloned) as u64,
348                    slot3: 0,
349                    slot4: 0,
350                }
351            }
352            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
353        }
354    }
355}
356
/// Drop a StackValue, decrementing refcounts for heap types.
///
/// Consumes the StackValue's ownership of its heap data. Calling this twice
/// on the same StackValue (or after `stack_value_to_value`) is a double free.
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
///
/// # Panics
/// Panics if `slot0` is not one of the known DISC_* constants.
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // No heap allocation, nothing to drop
            }
            DISC_STRING => {
                // Reconstruct SeqString and let it drop
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
                // SeqString::drop will free global strings, ignore arena strings
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Rebuild the Arc and drop it, releasing one strong reference.
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                // Rebuild the Box and drop it, freeing the map and its contents.
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Unbox and drop the Arc<[Value]>
                // (frees the Box and releases one reference to the env).
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::ChannelData;
                // Release one reference to each of the two channels.
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const ChannelData);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}
404
405// ============================================================================
406// Core Stack Operations
407// ============================================================================
408
409/// Push a value onto the stack
410///
411/// Stores the value at the current stack pointer and returns sp + 1.
412///
413/// # Safety
414/// Stack pointer must be valid and have room for the value.
415#[inline]
416pub unsafe fn push(stack: Stack, value: Value) -> Stack {
417    unsafe {
418        let sv = value_to_stack_value(value);
419        *stack = sv;
420        stack.add(1)
421    }
422}
423
424/// Push a StackValue directly onto the stack
425///
426/// # Safety
427/// Stack pointer must be valid and have room for the value.
428#[inline]
429pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
430    unsafe {
431        *stack = sv;
432        stack.add(1)
433    }
434}
435
436/// Pop a value from the stack
437///
438/// Returns (new_sp, value) where new_sp = sp - 1.
439///
440/// # Safety
441/// Stack must not be at base (must have at least one value).
442#[inline]
443pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
444    unsafe {
445        let new_sp = stack.sub(1);
446        let sv = *new_sp;
447        (new_sp, stack_value_to_value(sv))
448    }
449}
450
451/// Pop a StackValue directly from the stack
452///
453/// # Safety
454/// Stack must not be at base and must have at least one value.
455#[inline]
456pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
457    unsafe {
458        let new_sp = stack.sub(1);
459        let sv = *new_sp;
460        (new_sp, sv)
461    }
462}
463
464/// Pop two values from the stack (for binary operations)
465///
466/// Returns (new_sp, a, b) where a was below b on stack.
467/// Stack effect: ( a b -- ) returns (a, b)
468///
469/// # Safety
470/// Stack must have at least two values.
471#[inline]
472pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
473    unsafe {
474        let (sp, b) = pop(stack);
475        let (sp, a) = pop(sp);
476        (sp, a, b)
477    }
478}
479
480/// Pop three values from the stack (for ternary operations)
481///
482/// # Safety
483/// Stack must have at least three values.
484#[inline]
485pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
486    unsafe {
487        let (sp, c) = pop(stack);
488        let (sp, b) = pop(sp);
489        let (sp, a) = pop(sp);
490        (sp, a, b, c)
491    }
492}
493
494/// Peek at the top value without removing it
495///
496/// # Safety
497/// Stack must have at least one value.
498#[inline]
499pub unsafe fn peek(stack: Stack) -> Value {
500    unsafe {
501        let sv = *stack.sub(1);
502        // Don't consume - need to clone for heap types
503        stack_value_to_value(clone_stack_value(&sv))
504    }
505}
506
507/// Peek at the raw StackValue without removing it
508///
509/// # Safety
510/// Stack must have at least one value.
511#[inline]
512pub unsafe fn peek_sv(stack: Stack) -> StackValue {
513    unsafe { *stack.sub(1) }
514}
515
516/// Check if stack is empty (at base pointer)
517/// Note: With tagged stack, we need to compare against base, not null
518#[inline]
519pub fn is_empty(_stack: Stack) -> bool {
520    // For now, assume stack is never truly empty in valid programs
521    // The caller should track base pointer if needed
522    false
523}
524
525// ============================================================================
526// FFI Stack Operations
527// ============================================================================
528
529/// Duplicate the top value on the stack: ( a -- a a )
530///
531/// # Safety
532/// Stack must have at least one value.
533#[unsafe(no_mangle)]
534pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
535    unsafe {
536        let sv = peek_sv(stack);
537        let cloned = clone_stack_value(&sv);
538        push_sv(stack, cloned)
539    }
540}
541
542/// Drop the top value from the stack: ( a -- )
543///
544/// # Safety
545/// Stack must have at least one value.
546#[inline]
547pub unsafe fn drop_top(stack: Stack) -> Stack {
548    unsafe {
549        let (new_sp, sv) = pop_sv(stack);
550        drop_stack_value(sv);
551        new_sp
552    }
553}
554
/// Alias for drop to avoid LLVM keyword conflicts
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    // Thin FFI wrapper around drop_top; exists only because the name
    // "drop" conflicts with an LLVM keyword per the doc comment above.
    unsafe { drop_top(stack) }
}
563
/// Push an arbitrary Value onto the stack (for LLVM codegen)
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
// NOTE(review): `Value` is not a C-compatible type, hence the lint allow;
// presumably the generated code agrees with Rust's layout here — confirm
// against the codegen side if this signature changes.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
573
574/// Swap the top two values: ( a b -- b a )
575///
576/// # Safety
577/// Stack must have at least two values.
578#[unsafe(no_mangle)]
579pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
580    unsafe {
581        let ptr_b = stack.sub(1);
582        let ptr_a = stack.sub(2);
583        let a = *ptr_a;
584        let b = *ptr_b;
585        *ptr_a = b;
586        *ptr_b = a;
587        stack
588    }
589}
590
591/// Copy the second value to the top: ( a b -- a b a )
592///
593/// # Safety
594/// Stack must have at least two values.
595#[unsafe(no_mangle)]
596pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
597    unsafe {
598        let sv_a = *stack.sub(2);
599        let cloned = clone_stack_value(&sv_a);
600        push_sv(stack, cloned)
601    }
602}
603
604/// Rotate the top three values: ( a b c -- b c a )
605///
606/// # Safety
607/// Stack must have at least three values.
608#[unsafe(no_mangle)]
609pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
610    unsafe {
611        let ptr_c = stack.sub(1);
612        let ptr_b = stack.sub(2);
613        let ptr_a = stack.sub(3);
614        let a = *ptr_a;
615        let b = *ptr_b;
616        let c = *ptr_c;
617        *ptr_a = b;
618        *ptr_b = c;
619        *ptr_c = a;
620        stack
621    }
622}
623
624/// Remove the second value: ( a b -- b )
625///
626/// # Safety
627/// Stack must have at least two values.
628#[unsafe(no_mangle)]
629pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
630    unsafe {
631        let ptr_b = stack.sub(1);
632        let ptr_a = stack.sub(2);
633        let a = *ptr_a;
634        let b = *ptr_b;
635        drop_stack_value(a);
636        *ptr_a = b;
637        stack.sub(1)
638    }
639}
640
641/// Copy top value below second value: ( a b -- b a b )
642///
643/// # Safety
644/// Stack must have at least two values.
645#[unsafe(no_mangle)]
646pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
647    unsafe {
648        let ptr_b = stack.sub(1);
649        let ptr_a = stack.sub(2);
650        let a = *ptr_a;
651        let b = *ptr_b;
652        let b_clone = clone_stack_value(&b);
653        *ptr_a = b;
654        *ptr_b = a;
655        push_sv(stack, b_clone)
656    }
657}
658
659/// Duplicate top two values: ( a b -- a b a b )
660///
661/// # Safety
662/// Stack must have at least two values.
663#[unsafe(no_mangle)]
664pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
665    unsafe {
666        let sv_a = *stack.sub(2);
667        let sv_b = *stack.sub(1);
668        let a_clone = clone_stack_value(&sv_a);
669        let b_clone = clone_stack_value(&sv_b);
670        let sp = push_sv(stack, a_clone);
671        push_sv(sp, b_clone)
672    }
673}
674
675/// Drop top three values: ( a b c -- )
676///
677/// # Safety
678/// Stack must have at least three values.
679#[unsafe(no_mangle)]
680pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
681    unsafe {
682        let (sp, sv_c) = pop_sv(stack);
683        let (sp, sv_b) = pop_sv(sp);
684        let (sp, sv_a) = pop_sv(sp);
685        drop_stack_value(sv_c);
686        drop_stack_value(sv_b);
687        drop_stack_value(sv_a);
688        sp
689    }
690}
691
/// Pick: Copy the nth value to the top
/// ( ... xn ... x1 x0 n -- ... xn ... x1 x0 xn )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
/// The depth check relies on STACK_BASE having been set for this strand
/// via `patch_seq_set_stack_base`.
///
/// # Panics
/// - If the top value is not an Int
/// - If n is negative
/// - If n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("pick: expected Int"),
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            panic!("pick: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        // Check stack depth to prevent out-of-bounds access
        // (depth measured in StackValue entries between base and sp)
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        // Get the value at depth n (0 = top after popping n)
        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}
736
737/// Roll: Rotate n+1 items, bringing the item at depth n to the top
738/// ( x_n x_(n-1) ... x_1 x_0 n -- x_(n-1) ... x_1 x_0 x_n )
739///
740/// # Safety
741/// Stack must have at least n+1 values (plus the index value).
742///
743/// # Panics
744/// - If the top value is not an Int
745/// - If n is negative
746/// - If n exceeds the current stack depth
747#[unsafe(no_mangle)]
748pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
749    unsafe {
750        // Get n from top of stack
751        let (sp, n_val) = pop(stack);
752        let n_raw = match n_val {
753            Value::Int(i) => i,
754            _ => panic!("roll: expected Int"),
755        };
756
757        // Bounds check: n must be non-negative
758        if n_raw < 0 {
759            panic!("roll: index cannot be negative (got {})", n_raw);
760        }
761        let n = n_raw as usize;
762
763        if n == 0 {
764            return sp;
765        }
766        if n == 1 {
767            return patch_seq_swap(sp);
768        }
769        if n == 2 {
770            return patch_seq_rot(sp);
771        }
772
773        // Check stack depth to prevent out-of-bounds access
774        let base = get_stack_base();
775        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
776        if n >= depth {
777            panic!(
778                "roll: index {} exceeds stack depth {} (need at least {} values)",
779                n,
780                depth,
781                n + 1
782            );
783        }
784
785        // General case: save item at depth n, shift others, put saved at top
786        let src_ptr = sp.sub(n + 1);
787        let saved = *src_ptr;
788
789        // Shift items down: memmove from src+1 to src, n items
790        std::ptr::copy(src_ptr.add(1), src_ptr, n);
791
792        // Put saved item at top (sp - 1)
793        *sp.sub(1) = saved;
794
795        sp
796    }
797}
798
799/// Clone a stack segment
800///
801/// Clones `count` StackValues from src to dst, handling refcounts.
802///
803/// # Safety
804/// Both src and dst must be valid stack pointers with sufficient space for count values.
805pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
806    unsafe {
807        for i in 0..count {
808            let sv = *src.sub(count - i);
809            let cloned = clone_stack_value(&sv);
810            *dst.add(i) = cloned;
811        }
812    }
813}
814
815// ============================================================================
816// Coroutine-Local Stack Base Tracking (for spawn)
817// ============================================================================
818//
819// IMPORTANT: We use May's coroutine_local! instead of thread_local! because
820// May coroutines can migrate between OS threads. Using thread_local would cause
821// STACK_BASE to be lost when a coroutine is moved to a different thread.
822
use std::cell::Cell;

// Use coroutine-local storage that moves with the coroutine.
// Stored as usize (0 = unset) so the Cell holds a plain integer; see the
// module note above on why thread_local would be incorrect here.
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
827
828/// Set the current strand's stack base (called at strand entry)
829///
830/// # Safety
831/// Base pointer must be a valid stack pointer for the current strand.
832#[unsafe(no_mangle)]
833pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
834    STACK_BASE.with(|cell| {
835        cell.set(base as usize);
836    });
837}
838
/// Get the current strand's stack base
///
/// Returns null if `patch_seq_set_stack_base` has not been called on this
/// strand (the coroutine-local slot defaults to 0).
#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}
844
845/// Clone the current stack for spawning a child strand
846///
847/// Allocates a new stack buffer and copies all values from the current stack.
848/// Returns a pointer to the first value in the new stack (like Stack convention).
849///
850/// # Safety
851/// - Current stack must have a valid base set via set_stack_base
852/// - sp must point to a valid position within the current stack
853#[unsafe(no_mangle)]
854pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
855    unsafe {
856        let (new_sp, _base) = clone_stack_with_base(sp);
857        new_sp
858    }
859}
860
861/// Clone the current stack for spawning, returning both base and sp.
862///
863/// This is used by spawn to create a copy of the parent's stack for the child strand.
864/// Returns (new_sp, new_base) so the spawn mechanism can set STACK_BASE for the child.
865///
866/// # Safety
867/// Current stack must have a valid base set via set_stack_base and sp must point to a valid position.
868pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
869    let base = get_stack_base();
870    if base.is_null() {
871        panic!("clone_stack: stack base not set");
872    }
873
874    // Calculate depth (number of values on stack)
875    let depth = unsafe { sp.offset_from(base) as usize };
876
877    if depth == 0 {
878        // Empty stack - still need to allocate a buffer
879        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
880        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
881        let new_base = new_stack.base;
882        std::mem::forget(new_stack); // Don't drop - caller owns memory
883        return (new_base, new_base);
884    }
885
886    // Allocate new stack with capacity for at least the current depth
887    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
888    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
889    let new_stack = TaggedStack::new(capacity);
890    let new_base = new_stack.base;
891    std::mem::forget(new_stack); // Don't drop - caller owns memory
892
893    // Clone all values from base to sp
894    unsafe {
895        for i in 0..depth {
896            let sv = &*base.add(i);
897            let cloned = clone_stack_value(sv);
898            *new_base.add(i) = cloned;
899        }
900    }
901
902    // Return both sp and base
903    unsafe { (new_base.add(depth), new_base) }
904}
905
906// ============================================================================
907// Short Aliases for Internal/Test Use
908// ============================================================================
909
910pub use patch_seq_2dup as two_dup;
911pub use patch_seq_3drop as three_drop;
912pub use patch_seq_dup as dup;
913pub use patch_seq_nip as nip;
914pub use patch_seq_over as over;
915pub use patch_seq_pick_op as pick;
916pub use patch_seq_roll as roll;
917pub use patch_seq_rot as rot;
918pub use patch_seq_swap as swap;
919pub use patch_seq_tuck as tuck;
920
921// ============================================================================
922// Stack Allocation Helpers
923// ============================================================================
924
925/// Allocate a new stack with default capacity.
926/// Returns a pointer to the base of the stack (where first push goes).
927///
928/// # Note
929/// The returned stack is allocated but not tracked.
930/// The memory will be leaked when the caller is done with it.
931/// This is used for temporary stacks in quotation calls and tests.
932pub fn alloc_stack() -> Stack {
933    use crate::tagged_stack::TaggedStack;
934    let stack = TaggedStack::with_default_capacity();
935    let base = stack.base;
936    std::mem::forget(stack); // Don't drop - caller owns memory
937    base
938}
939
/// Allocate a new test stack and set it as the stack base
/// This is used in tests that need clone_stack to work
#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let base = alloc_stack();
    // Register the fresh buffer as this strand's stack base so helpers that
    // read STACK_BASE (e.g. clone_stack) function inside tests.
    unsafe {
        patch_seq_set_stack_base(base);
    }
    base
}
948
949/// Dump all values on the stack (for REPL debugging)
950///
951/// Prints all stack values in a readable format, then clears the stack.
952/// Returns the stack base (empty stack).
953///
954/// # Safety
955/// - Stack base must have been set via set_stack_base
956/// - sp must be a valid stack pointer
957/// - All stack values between base and sp must be valid
958#[unsafe(no_mangle)]
959pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
960    let base = get_stack_base();
961    if base.is_null() {
962        eprintln!("[stack.dump: base not set]");
963        return sp;
964    }
965
966    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
967
968    if depth == 0 {
969        println!("stack:");
970    } else {
971        use std::io::Write;
972        print!("stack: ");
973        for i in 0..depth {
974            if i > 0 {
975                print!(" ");
976            }
977            unsafe {
978                let sv = *base.add(i);
979                print_stack_value(&sv);
980            }
981        }
982        println!();
983        // Flush stdout to ensure output is visible immediately
984        // This prevents partial output if the program terminates unexpectedly
985        let _ = std::io::stdout().flush();
986
987        // Drop all heap-allocated values to prevent memory leaks
988        for i in 0..depth {
989            unsafe {
990                let sv = *base.add(i);
991                drop_stack_value(sv);
992            }
993        }
994    }
995
996    // Return base (empty stack)
997    base
998}
999
1000/// Print a stack value in a readable format
1001///
1002/// # Safety Requirements
1003/// The StackValue must be valid and not previously dropped. For strings,
1004/// the pointer (slot1) must point to valid, readable memory of length slot2.
1005/// This is guaranteed when called from stack.dump on freshly computed values.
1006fn print_stack_value(sv: &StackValue) {
1007    match sv.slot0 {
1008        DISC_INT => print!("{}", sv.slot1 as i64),
1009        DISC_FLOAT => {
1010            let f = f64::from_bits(sv.slot1);
1011            if f.fract() == 0.0 && f.is_finite() {
1012                print!("{}.0", f)
1013            } else {
1014                print!("{}", f)
1015            }
1016        }
1017        DISC_BOOL => print!("{}", if sv.slot1 != 0 { "true" } else { "false" }),
1018        DISC_STRING => {
1019            let ptr = sv.slot1 as *const u8;
1020            let len = sv.slot2 as usize;
1021            // Safety: We trust the stack value is valid. The pointer was allocated
1022            // by SeqString and hasn't been freed yet (we print before dropping).
1023            // Additional defensive checks for null/zero-length.
1024            if ptr.is_null() || len == 0 {
1025                print!("\"\"");
1026            } else if len > 10_000_000 {
1027                // Sanity check: reject unreasonably large strings (likely corruption)
1028                print!("<string:invalid length {}>", len);
1029            } else {
1030                unsafe {
1031                    let slice = std::slice::from_raw_parts(ptr, len);
1032                    if let Ok(s) = std::str::from_utf8(slice) {
1033                        print!("\"{}\"", s);
1034                    } else {
1035                        print!("<string:{} bytes, non-utf8>", len);
1036                    }
1037                }
1038            }
1039        }
1040        DISC_VARIANT => print!("<variant>"),
1041        DISC_MAP => print!("<map>"),
1042        DISC_QUOTATION => print!("<quotation>"),
1043        DISC_CLOSURE => print!("<closure>"),
1044        DISC_CHANNEL => print!("<channel>"),
1045        _ => print!("<unknown:{}>", sv.slot0),
1046    }
1047}
1048
/// Macro to create a test stack
///
/// Expands to a raw pointer to a 256-entry, zero-initialized `StackValue`
/// buffer. Each expansion site gets its own `static` buffer; note that
/// re-executing the same expansion reuses (and does not reset) that buffer.
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        // All-zero bytes form a valid StackValue (all slots are u64;
        // slot0 == 0 is DISC_INT).
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer WITHOUT forming a reference to the `static mut`:
        // `BUFFER.as_mut_ptr()` auto-refs the array first, which trips the
        // `static_mut_refs` lint and is a hard error in edition 2024.
        unsafe { std::ptr::addr_of_mut!(BUFFER).cast::<StackValue>() }
    }};
}