seq_runtime/stack.rs

//! Tagged Stack Implementation
//!
//! This module implements the stack using a contiguous array of 40-byte StackValue entries.
//! Each StackValue has the layout: { slot0: discriminant, slot1-4: payload }
//!
//! The Stack type is a pointer to the "current position" (where the next push goes).
//! - Push: store at *sp, return sp + 1
//! - Pop: return sp - 1, read from *(sp - 1)
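//!
//! # Example
//!
//! A minimal sketch of the pointer convention (marked `ignore`: the stack is a
//! raw pointer into a leaked buffer, so this is illustration, not a doctest):
//!
//! ```ignore
//! let sp = alloc_stack();                       // sp points at the base slot
//! let sp = unsafe { push(sp, Value::Int(7)) };  // now sp == base + 1
//! let (sp, v) = unsafe { pop(sp) };             // back to base; v is Int(7)
//! ```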

use crate::tagged_stack::StackValue;
use crate::value::Value;
#[cfg(not(feature = "nanbox"))]
use std::sync::Arc;

/// Stack: A pointer to the current position in a contiguous array of StackValue.
///
/// Points to where the next value would be pushed.
/// sp - 1 points to the top value, sp - 2 to second, etc.
pub type Stack = *mut StackValue;

/// Returns the size of a StackValue in bytes (for stack depth calculations)
#[inline]
pub fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}

/// Discriminant values matching codegen
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

/// Convert a Value to a StackValue for pushing onto the tagged stack
#[cfg(not(feature = "nanbox"))]
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            // SeqString has: ptr, len, capacity, global
            // Store these in slots 1-4
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            // Symbol uses the same SeqString representation as String
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            // Arc<[Value]> is a fat pointer - use Box to store it
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            // Store Arc<ChannelData> as raw pointer
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            // Store both Arc<WeaveChannelData> as raw pointers
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}
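
// Ownership note: value_to_stack_value consumes its Value and moves any heap
// ownership into the raw slots (via into_raw/into_raw_parts). Every StackValue
// produced here must later flow through exactly one of stack_value_to_value
// (which takes the ownership back) or drop_stack_value (which releases it);
// otherwise the heap allocation leaks.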

/// Convert a Value to a StackValue for pushing onto the tagged stack (NaN-boxed version)
#[cfg(feature = "nanbox")]
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    StackValue(crate::nanbox::NanBoxedValue::from_value(&value))
}

/// Convert a StackValue back to a Value
///
/// # Safety
/// The StackValue must contain valid data for its discriminant
#[cfg(not(feature = "nanbox"))]
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // Unbox the Arc<[Value]> that we boxed in value_to_stack_value
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}

/// Convert a StackValue back to a Value (NaN-boxed version)
///
/// # Safety
/// The StackValue must contain a valid NanBoxedValue
#[cfg(feature = "nanbox")]
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe { sv.0.to_value() }
}

/// Clone a StackValue from LLVM IR - reads from src pointer, writes to dst pointer.
/// This is the FFI-callable version for inline codegen that avoids ABI issues
/// with passing large structs by value.
///
/// # Safety
/// Both src and dst pointers must be valid and properly aligned StackValue pointers.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

/// Clone a StackValue, handling reference counting for heap types.
///
/// # Cloning Strategy by Type
/// - **Int, Float, Bool, Quotation**: Bitwise copy (no heap allocation)
/// - **String**: Deep copy to a new global (refcounted) string. This is necessary
///   because the source may be an arena-allocated string that would become invalid
///   when the arena resets. Global strings are heap-allocated with Arc refcounting.
/// - **Symbol**: Interned symbols (capacity 0, global) share their static pointer;
///   all other symbols are deep-copied like String
/// - **Variant**: Arc refcount increment (O(1), shares underlying data)
/// - **Map**: Deep clone of the HashMap and all contained values
/// - **Closure**: Arc refcount increment on the shared Arc<[Value]> environment
///   (only the Box wrapper is reallocated)
/// - **Channel**: Arc refcount increment (O(1), shares underlying sender/receiver)
/// - **WeaveCtx**: Arc refcount increments for both channels (O(1))
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
#[cfg(not(feature = "nanbox"))]
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                // Deep copy: arena strings may become invalid, so always create a global string
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                // Read the string content without taking ownership
                let slice = std::slice::from_raw_parts(ptr, len);
                // Validate UTF-8 in debug builds, skip in release for performance
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                // Clone to a new global string
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                // Deep clone the map
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                // The env is stored as Box<Arc<[Value]>>
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                // Box the cloned Arc
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                // Arc refcount increment - O(1) clone
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                // Arc refcount increment for both channels - O(1) clone
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                // Fast path (Issue #166): interned symbols can share the static pointer
                // Interned symbols have capacity=0 and global=true
                if capacity == 0 && is_global {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    // Safety: Interned symbols are guaranteed to point to valid static data.
                    // The compiler generates these in get_symbol_global(), which always
                    // creates valid string globals. A null pointer here indicates compiler bug.
                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    // Create a new SeqString that shares the static data.
                    // This properly maintains ownership semantics even though
                    // Drop is a no-op for capacity=0 symbols.
                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    // Deep copy: arena symbols may become invalid
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}

/// Clone a StackValue (NaN-boxed version)
///
/// # Safety
/// The StackValue must contain a valid NanBoxedValue
#[cfg(feature = "nanbox")]
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    StackValue(sv.0.clone_nanboxed())
}

/// Drop a StackValue, decrementing refcounts for heap types
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
#[cfg(not(feature = "nanbox"))]
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // No heap allocation, nothing to drop
            }
            DISC_STRING => {
                // Reconstruct SeqString and let it drop
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
                // SeqString::drop will free global strings, ignore arena strings
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Unbox and drop the Arc<[Value]>
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                // Reconstruct SeqString and let it drop (same as String)
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}

/// Drop a StackValue (NaN-boxed version)
///
/// # Safety
/// The StackValue must contain a valid NanBoxedValue
#[cfg(feature = "nanbox")]
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe { sv.0.drop_nanboxed() }
}

// ============================================================================
// Core Stack Operations
// ============================================================================
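
// Illustrative usage (a hedged sketch; `int_add` is a hypothetical helper, not
// part of this module): a binary primitive built on these operations pops its
// two operands with pop_two and pushes one result.
//
//     unsafe fn int_add(stack: Stack) -> Stack {
//         let (sp, a, b) = pop_two(stack, "add");
//         match (a, b) {
//             (Value::Int(x), Value::Int(y)) => push(sp, Value::Int(x + y)),
//             _ => {
//                 crate::error::set_runtime_error("add: expected Int Int");
//                 sp
//             }
//         }
//     }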

/// Push a value onto the stack
///
/// Stores the value at the current stack pointer and returns sp + 1.
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

/// Push a StackValue directly onto the stack
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

/// Pop a value from the stack
///
/// Returns (new_sp, value) where new_sp = sp - 1.
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

/// Pop a StackValue directly from the stack
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

/// Pop two values from the stack (for binary operations)
///
/// Returns (new_sp, a, b) where a was below b on the stack.
/// Stack effect: ( a b -- ) returns (a, b)
///
/// # Safety
/// Stack must have at least two values.
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}

/// Pop three values from the stack (for ternary operations)
///
/// # Safety
/// Stack must have at least three values.
#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

/// Peek at the top value without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let sv = *stack.sub(1);
        // Don't consume - need to clone for heap types
        stack_value_to_value(clone_stack_value(&sv))
    }
}

/// Peek at the raw StackValue without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

/// Check if stack is empty (at base pointer)
/// Note: With the tagged stack, we need to compare against base, not null
#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // For now, assume stack is never truly empty in valid programs
    // The caller should track the base pointer if needed
    false
}

// ============================================================================
// FFI Stack Operations
// ============================================================================

/// Duplicate the top value on the stack: ( a -- a a )
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

/// Drop the top value from the stack: ( a -- )
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

/// Alias for drop to avoid LLVM keyword conflicts
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

/// Push an arbitrary Value onto the stack (for LLVM codegen)
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

/// Swap the top two values: ( a b -- b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

/// Copy the second value to the top: ( a b -- a b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

/// Rotate the top three values: ( a b c -- b c a )
///
/// # Safety
/// Stack must have at least three values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}

/// Remove the second value: ( a b -- b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

/// Copy top value below second value: ( a b -- b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

/// Duplicate top two values: ( a b -- a b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

/// Drop top three values: ( a b c -- )
///
/// # Safety
/// Stack must have at least three values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
    unsafe {
        let (sp, sv_c) = pop_sv(stack);
        let (sp, sv_b) = pop_sv(sp);
        let (sp, sv_a) = pop_sv(sp);
        drop_stack_value(sv_c);
        drop_stack_value(sv_b);
        drop_stack_value(sv_a);
        sp
    }
}

/// Pick: Copy the nth value to the top
/// ( ... xn ... x1 x0 n -- ... xn ... x1 x0 xn )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // Get the value at depth n (0 = top after popping n)
        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}

/// Roll: Rotate n+1 items, bringing the item at depth n to the top
/// ( x_n x_(n-1) ... x_1 x_0 n -- x_(n-1) ... x_1 x_0 x_n )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // General case: save item at depth n, shift others, put saved at top
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift items down: memmove from src+1 to src, n items
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        // Put saved item at top (sp - 1)
        *sp.sub(1) = saved;

        sp
    }
}

/// Clone a stack segment
///
/// Clones `count` StackValues from src to dst, handling refcounts. Note the
/// asymmetric conventions: `src` is a top-of-stack pointer (values are read
/// from `src - count` through `src - 1`), while `dst` is a base pointer
/// (values are written at `dst` through `dst + count - 1`).
///
/// # Safety
/// Both src and dst must be valid stack pointers with sufficient space for count values.
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for i in 0..count {
            let sv = *src.sub(count - i);
            let cloned = clone_stack_value(&sv);
            *dst.add(i) = cloned;
        }
    }
}

// ============================================================================
// Coroutine-Local Stack Base Tracking (for spawn)
// ============================================================================
//
// IMPORTANT: We use May's coroutine_local! instead of thread_local! because
// May coroutines can migrate between OS threads. Using thread_local would cause
// STACK_BASE to be lost when a coroutine is moved to a different thread.

use std::cell::Cell;

// Use coroutine-local storage that moves with the coroutine
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

/// Set the current strand's stack base (called at strand entry)
///
/// # Safety
/// Base pointer must be a valid stack pointer for the current strand.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

/// Get the current strand's stack base
#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

/// Clone the current stack for spawning a child strand
///
/// Allocates a new stack buffer and copies all values from the current stack.
/// Returns the new stack pointer (the next-push position), following the
/// Stack convention.
///
/// # Safety
/// - Current stack must have a valid base set via set_stack_base
/// - sp must point to a valid position within the current stack
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

/// Clone the current stack for spawning, returning both base and sp.
///
/// This is used by spawn to create a copy of the parent's stack for the child strand.
/// Returns (new_sp, new_base) so the spawn mechanism can set STACK_BASE for the child.
///
/// # Safety
/// Current stack must have a valid base set via set_stack_base and sp must point to a valid position.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    // Calculate depth (number of values on stack)
    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        // Empty stack - still need to allocate a buffer
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        std::mem::forget(new_stack); // Don't drop - caller owns memory
        return (new_base, new_base);
    }

    // Allocate new stack with capacity for at least the current depth
    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack); // Don't drop - caller owns memory

    // Clone all values from base to sp
    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    // Return both sp and base
    unsafe { (new_base.add(depth), new_base) }
}

// ============================================================================
// Short Aliases for Internal/Test Use
// ============================================================================

pub use patch_seq_2dup as two_dup;
pub use patch_seq_3drop as three_drop;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

// ============================================================================
// Stack Allocation Helpers
// ============================================================================

/// Allocate a new stack with default capacity.
/// Returns a pointer to the base of the stack (where the first push goes).
///
/// # Note
/// The returned stack is allocated but not tracked.
/// The memory will be leaked when the caller is done with it.
/// This is used for temporary stacks in quotation calls and tests.
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    std::mem::forget(stack); // Don't drop - caller owns memory
    base
}

/// Allocate a new test stack and set it as the stack base
/// This is used in tests that need clone_stack to work
#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

/// Dump all values on the stack (for REPL debugging)
///
/// Prints all stack values in a readable format, then clears the stack.
/// Returns the stack base (empty stack).
///
/// # Safety
/// - Stack base must have been set via set_stack_base
/// - sp must be a valid stack pointer
/// - All stack values between base and sp must be valid
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("stack:");
    } else {
        use std::io::Write;
        print!("stack: ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        // Flush stdout to ensure output is visible immediately
        // This prevents partial output if the program terminates unexpectedly
        let _ = std::io::stdout().flush();

        // Drop all heap-allocated values to prevent memory leaks
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    // Return base (empty stack)
    base
}

/// Print a stack value in SON (Seq Object Notation) format
///
/// # Safety Requirements
/// The StackValue must be valid and not previously dropped. For strings,
/// the pointer (slot1) must point to valid, readable memory of length slot2.
/// This is guaranteed when called from stack.dump on freshly computed values.
fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    // Safety: We must clone the StackValue before converting to Value because
    // stack_value_to_value takes ownership of heap-allocated data (via Box::from_raw
    // for maps, Arc::from_raw for variants, etc.). Without cloning, the Value would
    // be dropped after printing, freeing the memory, and then the later drop_stack_value
    // loop would double-free. clone_stack_value properly handles refcounting.
    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

/// Macro to create a test stack
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer without creating a reference to the mutable static
        // (avoids the static_mut_refs lint that BUFFER.as_mut_ptr() would trip).
        unsafe { std::ptr::addr_of_mut!(BUFFER) as *mut StackValue }
    }};
}

#[cfg(test)]
mod tests {
    use super::*;

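    // Illustrative additions to the error-path tests below: a scalar round
    // trip through the StackValue encoding, and a check of pop_two's
    // documented ( a b -- ) ordering.
    #[test]
    fn test_scalar_round_trip() {
        unsafe {
            let sv = value_to_stack_value(Value::Int(-3));
            assert!(matches!(stack_value_to_value(sv), Value::Int(-3)));

            let sv = value_to_stack_value(Value::Float(2.5));
            match stack_value_to_value(sv) {
                Value::Float(f) => assert_eq!(f, 2.5),
                _ => panic!("expected Float to round-trip"),
            }

            let sv = value_to_stack_value(Value::Bool(true));
            assert!(matches!(stack_value_to_value(sv), Value::Bool(true)));
        }
    }

    #[test]
    fn test_pop_two_returns_values_in_push_order() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1)); // a
            let stack = push(stack, Value::Int(2)); // b
            let (_sp, a, b) = pop_two(stack, "test");
            assert!(matches!(a, Value::Int(1)));
            assert!(matches!(b, Value::Int(2)));
        }
    }
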
    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // some value
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // only one value
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
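
    // Illustrative happy-path companion to the error tests above, following
    // pick's documented stack effect: ( 100 200 1 -- 100 200 100 ).
    #[test]
    fn test_pick_copies_nth_value() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(200));
            let stack = push(stack, Value::Int(1)); // copy the value at depth 1

            let stack = patch_seq_pick_op(stack);

            assert!(!crate::error::has_runtime_error());
            let (stack, top) = pop(stack);
            assert!(matches!(top, Value::Int(100)));
            let (stack, second) = pop(stack);
            assert!(matches!(second, Value::Int(200)));
            let (_stack, bottom) = pop(stack);
            assert!(matches!(bottom, Value::Int(100)));
        }
    }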

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
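
    // Further illustrative coverage: roll's general memmove path, the basic
    // shuffle words via their short aliases, and the spawn-time stack clone.
    // All follow the stack effects documented on the functions above.
    #[test]
    fn test_roll_brings_deep_value_to_top() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1));
            let stack = push(stack, Value::Int(2));
            let stack = push(stack, Value::Int(3));
            let stack = push(stack, Value::Int(4));
            let stack = push(stack, Value::Int(3)); // n = 3: ( 1 2 3 4 -- 2 3 4 1 )

            let stack = patch_seq_roll(stack);

            assert!(!crate::error::has_runtime_error());
            let (stack, top) = pop(stack);
            assert!(matches!(top, Value::Int(1)));
            let (stack, a) = pop(stack);
            assert!(matches!(a, Value::Int(4)));
            let (stack, b) = pop(stack);
            assert!(matches!(b, Value::Int(3)));
            let (_stack, c) = pop(stack);
            assert!(matches!(c, Value::Int(2)));
        }
    }

    #[test]
    fn test_swap_and_over_shuffle() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1));
            let stack = push(stack, Value::Int(2));
            let stack = swap(stack); // ( 1 2 -- 2 1 )
            let stack = over(stack); // ( 2 1 -- 2 1 2 )
            let (stack, top) = pop(stack);
            assert!(matches!(top, Value::Int(2)));
            let (stack, mid) = pop(stack);
            assert!(matches!(mid, Value::Int(1)));
            let (_stack, bottom) = pop(stack);
            assert!(matches!(bottom, Value::Int(2)));
        }
    }

    #[test]
    fn test_clone_stack_with_base_copies_values() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(10));
            let stack = push(stack, Value::Int(20));

            let (new_sp, new_base) = clone_stack_with_base(stack);
            assert_eq!(new_sp.offset_from(new_base), 2);

            let (new_sp, top) = pop(new_sp);
            assert!(matches!(top, Value::Int(20)));
            let (_new_sp, bottom) = pop(new_sp);
            assert!(matches!(bottom, Value::Int(10)));
        }
    }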
}