// seq_runtime/stack.rs

//! Tagged Stack Implementation
//!
//! This module implements the stack using a contiguous array of 40-byte StackValue entries.
//! Each StackValue has the layout: { slot0: discriminant, slot1-4: payload }
//!
//! The Stack type is a pointer to the "current position" (where next push goes).
//! - Push: store at *sp, return sp + 1
//! - Pop: return sp - 1, read from *(sp - 1)
10use crate::tagged_stack::StackValue;
11use crate::value::Value;
12use std::sync::Arc;
13
/// Stack: A pointer to the current position in a contiguous array of StackValue.
///
/// Points to where the next value would be pushed.
/// sp - 1 points to the top value, sp - 2 to second, etc.
pub type Stack = *mut StackValue;

/// Discriminant values matching codegen
///
/// Stored in `slot0` of each `StackValue`; the remaining slots hold the
/// payload produced by [`value_to_stack_value`]. These numeric tags must
/// stay in sync with the tags the compiler's codegen emits.
pub const DISC_INT: u64 = 0; // slot1 = i64 value (bit pattern)
pub const DISC_FLOAT: u64 = 1; // slot1 = f64::to_bits
pub const DISC_BOOL: u64 = 2; // slot1 = 0 or 1
pub const DISC_STRING: u64 = 3; // slot1-4 = ptr / len / capacity / global flag
pub const DISC_VARIANT: u64 = 4; // slot1 = Arc<VariantData> raw pointer
pub const DISC_MAP: u64 = 5; // slot1 = Box<HashMap<MapKey, Value>> raw pointer
pub const DISC_QUOTATION: u64 = 6; // slot1 = wrapper addr, slot2 = impl addr
pub const DISC_CLOSURE: u64 = 7; // slot1 = fn ptr, slot2 = Box<Arc<[Value]>> raw pointer
pub const DISC_CHANNEL: u64 = 8; // slot1 = Arc<ChannelData> raw pointer
pub const DISC_WEAVECTX: u64 = 9; // slot1 = yield Arc ptr, slot2 = resume Arc ptr
pub const DISC_SYMBOL: u64 = 10; // slot1-4 = ptr / len / capacity / global (same as String)
32
33/// Convert a Value to a StackValue for pushing onto the tagged stack
34#[inline]
35pub fn value_to_stack_value(value: Value) -> StackValue {
36    match value {
37        Value::Int(i) => StackValue {
38            slot0: DISC_INT,
39            slot1: i as u64,
40            slot2: 0,
41            slot3: 0,
42            slot4: 0,
43        },
44        Value::Float(f) => StackValue {
45            slot0: DISC_FLOAT,
46            slot1: f.to_bits(),
47            slot2: 0,
48            slot3: 0,
49            slot4: 0,
50        },
51        Value::Bool(b) => StackValue {
52            slot0: DISC_BOOL,
53            slot1: if b { 1 } else { 0 },
54            slot2: 0,
55            slot3: 0,
56            slot4: 0,
57        },
58        Value::String(s) => {
59            // SeqString has: ptr, len, capacity, global
60            // Store these in slots 1-4
61            let (ptr, len, capacity, global) = s.into_raw_parts();
62            StackValue {
63                slot0: DISC_STRING,
64                slot1: ptr as u64,
65                slot2: len as u64,
66                slot3: capacity as u64,
67                slot4: if global { 1 } else { 0 },
68            }
69        }
70        Value::Symbol(s) => {
71            // Symbol uses the same SeqString representation as String
72            let (ptr, len, capacity, global) = s.into_raw_parts();
73            StackValue {
74                slot0: DISC_SYMBOL,
75                slot1: ptr as u64,
76                slot2: len as u64,
77                slot3: capacity as u64,
78                slot4: if global { 1 } else { 0 },
79            }
80        }
81        Value::Variant(v) => {
82            let ptr = Arc::into_raw(v) as u64;
83            StackValue {
84                slot0: DISC_VARIANT,
85                slot1: ptr,
86                slot2: 0,
87                slot3: 0,
88                slot4: 0,
89            }
90        }
91        Value::Map(m) => {
92            let ptr = Box::into_raw(m) as u64;
93            StackValue {
94                slot0: DISC_MAP,
95                slot1: ptr,
96                slot2: 0,
97                slot3: 0,
98                slot4: 0,
99            }
100        }
101        Value::Quotation { wrapper, impl_ } => StackValue {
102            slot0: DISC_QUOTATION,
103            slot1: wrapper as u64,
104            slot2: impl_ as u64,
105            slot3: 0,
106            slot4: 0,
107        },
108        Value::Closure { fn_ptr, env } => {
109            // Arc<[Value]> is a fat pointer - use Box to store it
110            let env_box = Box::new(env);
111            let env_ptr = Box::into_raw(env_box) as u64;
112            StackValue {
113                slot0: DISC_CLOSURE,
114                slot1: fn_ptr as u64,
115                slot2: env_ptr,
116                slot3: 0,
117                slot4: 0,
118            }
119        }
120        Value::Channel(ch) => {
121            // Store Arc<ChannelData> as raw pointer
122            let ptr = Arc::into_raw(ch) as u64;
123            StackValue {
124                slot0: DISC_CHANNEL,
125                slot1: ptr,
126                slot2: 0,
127                slot3: 0,
128                slot4: 0,
129            }
130        }
131        Value::WeaveCtx {
132            yield_chan,
133            resume_chan,
134        } => {
135            // Store both Arc<WeaveChannelData> as raw pointers
136            let yield_ptr = Arc::into_raw(yield_chan) as u64;
137            let resume_ptr = Arc::into_raw(resume_chan) as u64;
138            StackValue {
139                slot0: DISC_WEAVECTX,
140                slot1: yield_ptr,
141                slot2: resume_ptr,
142                slot3: 0,
143                slot4: 0,
144            }
145        }
146    }
147}
148
/// Convert a StackValue back to a Value
///
/// Inverse of [`value_to_stack_value`]: reclaims ownership of any heap
/// resources the slots encode (Arc/Box raw pointers, string buffers), so
/// each StackValue must be converted at most once unless it was cloned.
///
/// # Safety
/// The StackValue must contain valid data for its discriminant
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            // Scalars: payload lives inline in slot1.
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                // Rebuild the SeqString from its four raw parts; the new
                // SeqString takes back ownership of the buffer.
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Reclaims the refcount that was transferred into slot1.
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                // Reclaims the Box allocation stored in slot1.
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            // Quotation slots are plain function addresses — no ownership.
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // Unbox the Arc<[Value]> that we boxed in value_to_stack_value
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                // Two independent Arcs: yield half in slot1, resume half in slot2.
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                // Same raw-parts layout as DISC_STRING.
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}
217
218/// Clone a StackValue from LLVM IR - reads from src pointer, writes to dst pointer.
219/// This is the FFI-callable version for inline codegen that avoids ABI issues
220/// with passing large structs by value.
221///
222/// # Safety
223/// Both src and dst pointers must be valid and properly aligned StackValue pointers.
224#[unsafe(no_mangle)]
225pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
226    unsafe {
227        let sv = &*src;
228        let cloned = clone_stack_value(sv);
229        *dst = cloned;
230    }
231}
232
233/// Clone a StackValue, handling reference counting for heap types.
234///
235/// # Cloning Strategy by Type
236/// - **Int, Float, Bool, Quotation**: Bitwise copy (no heap allocation)
237/// - **String**: Deep copy to a new global (refcounted) string. This is necessary
238///   because the source may be an arena-allocated string that would become invalid
239///   when the arena resets. Global strings are heap-allocated with Arc refcounting.
240/// - **Variant**: Arc refcount increment (O(1), shares underlying data)
241/// - **Map**: Deep clone of the HashMap and all contained values
242/// - **Closure**: Deep clone of the Arc<[Value]> environment
243/// - **Channel**: Arc refcount increment (O(1), shares underlying sender/receiver)
244///
245/// # Safety
246/// The StackValue must contain valid data for its discriminant.
247#[inline]
248pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
249    unsafe {
250        match sv.slot0 {
251            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
252            DISC_STRING => {
253                // Deep copy: arena strings may become invalid, so always create a global string
254                let ptr = sv.slot1 as *const u8;
255                let len = sv.slot2 as usize;
256                debug_assert!(!ptr.is_null(), "String pointer is null");
257                // Read the string content without taking ownership
258                let slice = std::slice::from_raw_parts(ptr, len);
259                // Validate UTF-8 in debug builds, skip in release for performance
260                #[cfg(debug_assertions)]
261                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
262                #[cfg(not(debug_assertions))]
263                let s = std::str::from_utf8_unchecked(slice);
264                // Clone to a new global string
265                let cloned = crate::seqstring::global_string(s.to_string());
266                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
267                StackValue {
268                    slot0: DISC_STRING,
269                    slot1: new_ptr as u64,
270                    slot2: new_len as u64,
271                    slot3: new_cap as u64,
272                    slot4: if new_global { 1 } else { 0 },
273                }
274            }
275            DISC_VARIANT => {
276                use crate::value::VariantData;
277                let ptr = sv.slot1 as *const VariantData;
278                debug_assert!(!ptr.is_null(), "Variant pointer is null");
279                debug_assert!(
280                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
281                    "Variant pointer is misaligned"
282                );
283                let arc = Arc::from_raw(ptr);
284                let cloned = Arc::clone(&arc);
285                std::mem::forget(arc);
286                StackValue {
287                    slot0: DISC_VARIANT,
288                    slot1: Arc::into_raw(cloned) as u64,
289                    slot2: 0,
290                    slot3: 0,
291                    slot4: 0,
292                }
293            }
294            DISC_MAP => {
295                // Deep clone the map
296                use crate::value::MapKey;
297                use std::collections::HashMap;
298                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
299                debug_assert!(!ptr.is_null(), "Map pointer is null");
300                debug_assert!(
301                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
302                    "Map pointer is misaligned"
303                );
304                let boxed = Box::from_raw(ptr);
305                let cloned = boxed.clone();
306                std::mem::forget(boxed);
307                StackValue {
308                    slot0: DISC_MAP,
309                    slot1: Box::into_raw(cloned) as u64,
310                    slot2: 0,
311                    slot3: 0,
312                    slot4: 0,
313                }
314            }
315            DISC_CLOSURE => {
316                // The env is stored as Box<Arc<[Value]>>
317                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
318                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
319                debug_assert!(
320                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
321                    "Closure env pointer is misaligned"
322                );
323                let env_arc = &*env_box_ptr;
324                let cloned_env = Arc::clone(env_arc);
325                // Box the cloned Arc
326                let new_env_box = Box::new(cloned_env);
327                StackValue {
328                    slot0: DISC_CLOSURE,
329                    slot1: sv.slot1,
330                    slot2: Box::into_raw(new_env_box) as u64,
331                    slot3: 0,
332                    slot4: 0,
333                }
334            }
335            DISC_CHANNEL => {
336                // Arc refcount increment - O(1) clone
337                use crate::value::ChannelData;
338                let ptr = sv.slot1 as *const ChannelData;
339                debug_assert!(!ptr.is_null(), "Channel pointer is null");
340                let arc = Arc::from_raw(ptr);
341                let cloned = Arc::clone(&arc);
342                std::mem::forget(arc);
343                StackValue {
344                    slot0: DISC_CHANNEL,
345                    slot1: Arc::into_raw(cloned) as u64,
346                    slot2: 0,
347                    slot3: 0,
348                    slot4: 0,
349                }
350            }
351            DISC_WEAVECTX => {
352                // Arc refcount increment for both channels - O(1) clone
353                use crate::value::WeaveChannelData;
354                let yield_ptr = sv.slot1 as *const WeaveChannelData;
355                let resume_ptr = sv.slot2 as *const WeaveChannelData;
356                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
357                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
358                let yield_arc = Arc::from_raw(yield_ptr);
359                let resume_arc = Arc::from_raw(resume_ptr);
360                let yield_cloned = Arc::clone(&yield_arc);
361                let resume_cloned = Arc::clone(&resume_arc);
362                std::mem::forget(yield_arc);
363                std::mem::forget(resume_arc);
364                StackValue {
365                    slot0: DISC_WEAVECTX,
366                    slot1: Arc::into_raw(yield_cloned) as u64,
367                    slot2: Arc::into_raw(resume_cloned) as u64,
368                    slot3: 0,
369                    slot4: 0,
370                }
371            }
372            DISC_SYMBOL => {
373                let capacity = sv.slot3 as usize;
374                let is_global = sv.slot4 != 0;
375
376                // Fast path (Issue #166): interned symbols can share the static pointer
377                // Interned symbols have capacity=0 and global=true
378                if capacity == 0 && is_global {
379                    let ptr = sv.slot1 as *const u8;
380                    let len = sv.slot2 as usize;
381
382                    // Safety: Interned symbols are guaranteed to point to valid static data.
383                    // The compiler generates these in get_symbol_global(), which always
384                    // creates valid string globals. A null pointer here indicates compiler bug.
385                    debug_assert!(
386                        !ptr.is_null(),
387                        "Interned symbol has null pointer in clone fast path"
388                    );
389
390                    // Create a new SeqString that shares the static data.
391                    // This properly maintains ownership semantics even though
392                    // Drop is a no-op for capacity=0 symbols.
393                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
394                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
395                    StackValue {
396                        slot0: DISC_SYMBOL,
397                        slot1: new_ptr as u64,
398                        slot2: new_len as u64,
399                        slot3: new_cap as u64,
400                        slot4: if new_global { 1 } else { 0 },
401                    }
402                } else {
403                    // Deep copy: arena symbols may become invalid
404                    let ptr = sv.slot1 as *const u8;
405                    let len = sv.slot2 as usize;
406                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
407                    let slice = std::slice::from_raw_parts(ptr, len);
408                    #[cfg(debug_assertions)]
409                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
410                    #[cfg(not(debug_assertions))]
411                    let s = std::str::from_utf8_unchecked(slice);
412                    let cloned = crate::seqstring::global_string(s.to_string());
413                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
414                    StackValue {
415                        slot0: DISC_SYMBOL,
416                        slot1: new_ptr as u64,
417                        slot2: new_len as u64,
418                        slot3: new_cap as u64,
419                        slot4: if new_global { 1 } else { 0 },
420                    }
421                }
422            }
423            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
424        }
425    }
426}
427
/// Drop a StackValue, decrementing refcounts for heap types
///
/// Consumes whatever heap resource the slots encode by reconstructing the
/// owning type (SeqString / Arc / Box) and letting its normal Drop run.
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // No heap allocation, nothing to drop
            }
            DISC_STRING => {
                // Reconstruct SeqString and let it drop
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                // `let _ =` binds and immediately drops the rebuilt string.
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
                // SeqString::drop will free global strings, ignore arena strings
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Rebuild the Arc so dropping it decrements the strong count.
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                // Rebuild the Box so the HashMap (and its contents) are freed.
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Unbox and drop the Arc<[Value]>
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                // Both channel halves hold an independent refcount.
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                // Reconstruct SeqString and let it drop (same as String)
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}
484
485// ============================================================================
486// Core Stack Operations
487// ============================================================================
488
489/// Push a value onto the stack
490///
491/// Stores the value at the current stack pointer and returns sp + 1.
492///
493/// # Safety
494/// Stack pointer must be valid and have room for the value.
495#[inline]
496pub unsafe fn push(stack: Stack, value: Value) -> Stack {
497    unsafe {
498        let sv = value_to_stack_value(value);
499        *stack = sv;
500        stack.add(1)
501    }
502}
503
504/// Push a StackValue directly onto the stack
505///
506/// # Safety
507/// Stack pointer must be valid and have room for the value.
508#[inline]
509pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
510    unsafe {
511        *stack = sv;
512        stack.add(1)
513    }
514}
515
516/// Pop a value from the stack
517///
518/// Returns (new_sp, value) where new_sp = sp - 1.
519///
520/// # Safety
521/// Stack must not be at base (must have at least one value).
522#[inline]
523pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
524    unsafe {
525        let new_sp = stack.sub(1);
526        let sv = *new_sp;
527        (new_sp, stack_value_to_value(sv))
528    }
529}
530
531/// Pop a StackValue directly from the stack
532///
533/// # Safety
534/// Stack must not be at base and must have at least one value.
535#[inline]
536pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
537    unsafe {
538        let new_sp = stack.sub(1);
539        let sv = *new_sp;
540        (new_sp, sv)
541    }
542}
543
544/// Pop two values from the stack (for binary operations)
545///
546/// Returns (new_sp, a, b) where a was below b on stack.
547/// Stack effect: ( a b -- ) returns (a, b)
548///
549/// # Safety
550/// Stack must have at least two values.
551#[inline]
552pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
553    unsafe {
554        let (sp, b) = pop(stack);
555        let (sp, a) = pop(sp);
556        (sp, a, b)
557    }
558}
559
560/// Pop three values from the stack (for ternary operations)
561///
562/// # Safety
563/// Stack must have at least three values.
564#[inline]
565pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
566    unsafe {
567        let (sp, c) = pop(stack);
568        let (sp, b) = pop(sp);
569        let (sp, a) = pop(sp);
570        (sp, a, b, c)
571    }
572}
573
574/// Peek at the top value without removing it
575///
576/// # Safety
577/// Stack must have at least one value.
578#[inline]
579pub unsafe fn peek(stack: Stack) -> Value {
580    unsafe {
581        let sv = *stack.sub(1);
582        // Don't consume - need to clone for heap types
583        stack_value_to_value(clone_stack_value(&sv))
584    }
585}
586
587/// Peek at the raw StackValue without removing it
588///
589/// # Safety
590/// Stack must have at least one value.
591#[inline]
592pub unsafe fn peek_sv(stack: Stack) -> StackValue {
593    unsafe { *stack.sub(1) }
594}
595
/// Check if stack is empty (at base pointer)
/// Note: With tagged stack, we need to compare against base, not null
///
/// NOTE(review): currently hard-coded to `false` — callers needing a real
/// emptiness check must compare against their own base pointer (see the
/// STACK_BASE coroutine-local used by pick/roll).
#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // For now, assume stack is never truly empty in valid programs
    // The caller should track base pointer if needed
    false
}
604
605// ============================================================================
606// FFI Stack Operations
607// ============================================================================
608
609/// Duplicate the top value on the stack: ( a -- a a )
610///
611/// # Safety
612/// Stack must have at least one value.
613#[unsafe(no_mangle)]
614pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
615    unsafe {
616        let sv = peek_sv(stack);
617        let cloned = clone_stack_value(&sv);
618        push_sv(stack, cloned)
619    }
620}
621
622/// Drop the top value from the stack: ( a -- )
623///
624/// # Safety
625/// Stack must have at least one value.
626#[inline]
627pub unsafe fn drop_top(stack: Stack) -> Stack {
628    unsafe {
629        let (new_sp, sv) = pop_sv(stack);
630        drop_stack_value(sv);
631        new_sp
632    }
633}
634
/// Alias for drop to avoid LLVM keyword conflicts
///
/// FFI entry point that simply forwards to [`drop_top`]; exported under a
/// distinct symbol name because `drop` conflicts with LLVM keywords.
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
643
/// Push an arbitrary Value onto the stack (for LLVM codegen)
///
/// FFI wrapper around [`push`]. `Value` is not FFI-safe (hence the
/// `improper_ctypes_definitions` allow); the generated code is expected to
/// pass it with the layout this crate was compiled with.
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
653
654/// Swap the top two values: ( a b -- b a )
655///
656/// # Safety
657/// Stack must have at least two values.
658#[unsafe(no_mangle)]
659pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
660    unsafe {
661        let ptr_b = stack.sub(1);
662        let ptr_a = stack.sub(2);
663        let a = *ptr_a;
664        let b = *ptr_b;
665        *ptr_a = b;
666        *ptr_b = a;
667        stack
668    }
669}
670
671/// Copy the second value to the top: ( a b -- a b a )
672///
673/// # Safety
674/// Stack must have at least two values.
675#[unsafe(no_mangle)]
676pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
677    unsafe {
678        let sv_a = *stack.sub(2);
679        let cloned = clone_stack_value(&sv_a);
680        push_sv(stack, cloned)
681    }
682}
683
684/// Rotate the top three values: ( a b c -- b c a )
685///
686/// # Safety
687/// Stack must have at least three values.
688#[unsafe(no_mangle)]
689pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
690    unsafe {
691        let ptr_c = stack.sub(1);
692        let ptr_b = stack.sub(2);
693        let ptr_a = stack.sub(3);
694        let a = *ptr_a;
695        let b = *ptr_b;
696        let c = *ptr_c;
697        *ptr_a = b;
698        *ptr_b = c;
699        *ptr_c = a;
700        stack
701    }
702}
703
704/// Remove the second value: ( a b -- b )
705///
706/// # Safety
707/// Stack must have at least two values.
708#[unsafe(no_mangle)]
709pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
710    unsafe {
711        let ptr_b = stack.sub(1);
712        let ptr_a = stack.sub(2);
713        let a = *ptr_a;
714        let b = *ptr_b;
715        drop_stack_value(a);
716        *ptr_a = b;
717        stack.sub(1)
718    }
719}
720
721/// Copy top value below second value: ( a b -- b a b )
722///
723/// # Safety
724/// Stack must have at least two values.
725#[unsafe(no_mangle)]
726pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
727    unsafe {
728        let ptr_b = stack.sub(1);
729        let ptr_a = stack.sub(2);
730        let a = *ptr_a;
731        let b = *ptr_b;
732        let b_clone = clone_stack_value(&b);
733        *ptr_a = b;
734        *ptr_b = a;
735        push_sv(stack, b_clone)
736    }
737}
738
739/// Duplicate top two values: ( a b -- a b a b )
740///
741/// # Safety
742/// Stack must have at least two values.
743#[unsafe(no_mangle)]
744pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
745    unsafe {
746        let sv_a = *stack.sub(2);
747        let sv_b = *stack.sub(1);
748        let a_clone = clone_stack_value(&sv_a);
749        let b_clone = clone_stack_value(&sv_b);
750        let sp = push_sv(stack, a_clone);
751        push_sv(sp, b_clone)
752    }
753}
754
755/// Drop top three values: ( a b c -- )
756///
757/// # Safety
758/// Stack must have at least three values.
759#[unsafe(no_mangle)]
760pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
761    unsafe {
762        let (sp, sv_c) = pop_sv(stack);
763        let (sp, sv_b) = pop_sv(sp);
764        let (sp, sv_a) = pop_sv(sp);
765        drop_stack_value(sv_c);
766        drop_stack_value(sv_b);
767        drop_stack_value(sv_a);
768        sp
769    }
770}
771
772/// Pick: Copy the nth value to the top
773/// ( ... xn ... x1 x0 n -- ... xn ... x1 x0 xn )
774///
775/// # Safety
776/// Stack must have at least n+1 values (plus the index value).
777///
778/// # Panics
779/// - If the top value is not an Int
780/// - If n is negative
781/// - If n exceeds the current stack depth
782#[unsafe(no_mangle)]
783pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
784    unsafe {
785        // Get n from top of stack
786        let (sp, n_val) = pop(stack);
787        let n_raw = match n_val {
788            Value::Int(i) => i,
789            _ => panic!("pick: expected Int"),
790        };
791
792        // Bounds check: n must be non-negative
793        if n_raw < 0 {
794            panic!("pick: index cannot be negative (got {})", n_raw);
795        }
796        let n = n_raw as usize;
797
798        // Check stack depth to prevent out-of-bounds access
799        let base = get_stack_base();
800        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
801        if n >= depth {
802            panic!(
803                "pick: index {} exceeds stack depth {} (need at least {} values)",
804                n,
805                depth,
806                n + 1
807            );
808        }
809
810        // Get the value at depth n (0 = top after popping n)
811        let sv = *sp.sub(n + 1);
812        let cloned = clone_stack_value(&sv);
813        push_sv(sp, cloned)
814    }
815}
816
/// Roll: Rotate n+1 items, bringing the item at depth n to the top
/// ( x_n x_(n-1) ... x_1 x_0 n -- x_(n-1) ... x_1 x_0 x_n )
///
/// The index `n` is itself popped from the top of the stack before the
/// rotation is performed.
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Panics
/// - If the top value is not an Int
/// - If n is negative
/// - If n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("roll: expected Int"),
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            panic!("roll: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        // Fast paths: 0 roll is a no-op, 1 roll == swap, 2 roll == rot.
        // NOTE(review): these delegated paths bypass the depth check below --
        // presumably swap/rot perform their own validation; confirm.
        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        // General case: save item at depth n, shift others, put saved at top.
        // These are raw bit-copies with no clone/drop calls: the same values
        // stay on the stack, only their order changes, so refcounts are
        // untouched.
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift items down: memmove from src+1 to src, n items
        // (ptr::copy handles the overlapping ranges).
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        // Put saved item at top (sp - 1)
        *sp.sub(1) = saved;

        sp
    }
}
878
879/// Clone a stack segment
880///
881/// Clones `count` StackValues from src to dst, handling refcounts.
882///
883/// # Safety
884/// Both src and dst must be valid stack pointers with sufficient space for count values.
885pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
886    unsafe {
887        for i in 0..count {
888            let sv = *src.sub(count - i);
889            let cloned = clone_stack_value(&sv);
890            *dst.add(i) = cloned;
891        }
892    }
893}
894
895// ============================================================================
896// Coroutine-Local Stack Base Tracking (for spawn)
897// ============================================================================
898//
899// IMPORTANT: We use May's coroutine_local! instead of thread_local! because
900// May coroutines can migrate between OS threads. Using thread_local would cause
901// STACK_BASE to be lost when a coroutine is moved to a different thread.
902
use std::cell::Cell;

// Use coroutine-local storage that moves with the coroutine.
// The base pointer is stored as a usize rather than a raw pointer; 0 acts as
// the "not set" sentinel (see the null check in clone_stack_with_base).
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
907
908/// Set the current strand's stack base (called at strand entry)
909///
910/// # Safety
911/// Base pointer must be a valid stack pointer for the current strand.
912#[unsafe(no_mangle)]
913pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
914    STACK_BASE.with(|cell| {
915        cell.set(base as usize);
916    });
917}
918
919/// Get the current strand's stack base
920#[inline]
921pub fn get_stack_base() -> Stack {
922    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
923}
924
925/// Clone the current stack for spawning a child strand
926///
927/// Allocates a new stack buffer and copies all values from the current stack.
928/// Returns a pointer to the first value in the new stack (like Stack convention).
929///
930/// # Safety
931/// - Current stack must have a valid base set via set_stack_base
932/// - sp must point to a valid position within the current stack
933#[unsafe(no_mangle)]
934pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
935    unsafe {
936        let (new_sp, _base) = clone_stack_with_base(sp);
937        new_sp
938    }
939}
940
941/// Clone the current stack for spawning, returning both base and sp.
942///
943/// This is used by spawn to create a copy of the parent's stack for the child strand.
944/// Returns (new_sp, new_base) so the spawn mechanism can set STACK_BASE for the child.
945///
946/// # Safety
947/// Current stack must have a valid base set via set_stack_base and sp must point to a valid position.
948pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
949    let base = get_stack_base();
950    if base.is_null() {
951        panic!("clone_stack: stack base not set");
952    }
953
954    // Calculate depth (number of values on stack)
955    let depth = unsafe { sp.offset_from(base) as usize };
956
957    if depth == 0 {
958        // Empty stack - still need to allocate a buffer
959        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
960        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
961        let new_base = new_stack.base;
962        std::mem::forget(new_stack); // Don't drop - caller owns memory
963        return (new_base, new_base);
964    }
965
966    // Allocate new stack with capacity for at least the current depth
967    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
968    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
969    let new_stack = TaggedStack::new(capacity);
970    let new_base = new_stack.base;
971    std::mem::forget(new_stack); // Don't drop - caller owns memory
972
973    // Clone all values from base to sp
974    unsafe {
975        for i in 0..depth {
976            let sv = &*base.add(i);
977            let cloned = clone_stack_value(sv);
978            *new_base.add(i) = cloned;
979        }
980    }
981
982    // Return both sp and base
983    unsafe { (new_base.add(depth), new_base) }
984}
985
986// ============================================================================
987// Short Aliases for Internal/Test Use
988// ============================================================================
989
// Each alias maps a C-ABI export (patch_seq_*) to the short Forth-style name
// used by internal callers and unit tests.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_3drop as three_drop;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
1000
1001// ============================================================================
1002// Stack Allocation Helpers
1003// ============================================================================
1004
1005/// Allocate a new stack with default capacity.
1006/// Returns a pointer to the base of the stack (where first push goes).
1007///
1008/// # Note
1009/// The returned stack is allocated but not tracked.
1010/// The memory will be leaked when the caller is done with it.
1011/// This is used for temporary stacks in quotation calls and tests.
1012pub fn alloc_stack() -> Stack {
1013    use crate::tagged_stack::TaggedStack;
1014    let stack = TaggedStack::with_default_capacity();
1015    let base = stack.base;
1016    std::mem::forget(stack); // Don't drop - caller owns memory
1017    base
1018}
1019
/// Allocate a new test stack and set it as the stack base
/// This is used in tests that need clone_stack to work
#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let base = alloc_stack();
    // Register the fresh buffer as this strand's base so depth calculations
    // (and clone_stack) work inside tests.
    unsafe { patch_seq_set_stack_base(base) };
    base
}
1028
1029/// Dump all values on the stack (for REPL debugging)
1030///
1031/// Prints all stack values in a readable format, then clears the stack.
1032/// Returns the stack base (empty stack).
1033///
1034/// # Safety
1035/// - Stack base must have been set via set_stack_base
1036/// - sp must be a valid stack pointer
1037/// - All stack values between base and sp must be valid
1038#[unsafe(no_mangle)]
1039pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
1040    let base = get_stack_base();
1041    if base.is_null() {
1042        eprintln!("[stack.dump: base not set]");
1043        return sp;
1044    }
1045
1046    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
1047
1048    if depth == 0 {
1049        println!("stack:");
1050    } else {
1051        use std::io::Write;
1052        print!("stack: ");
1053        for i in 0..depth {
1054            if i > 0 {
1055                print!(" ");
1056            }
1057            unsafe {
1058                let sv = *base.add(i);
1059                print_stack_value(&sv);
1060            }
1061        }
1062        println!();
1063        // Flush stdout to ensure output is visible immediately
1064        // This prevents partial output if the program terminates unexpectedly
1065        let _ = std::io::stdout().flush();
1066
1067        // Drop all heap-allocated values to prevent memory leaks
1068        for i in 0..depth {
1069            unsafe {
1070                let sv = *base.add(i);
1071                drop_stack_value(sv);
1072            }
1073        }
1074    }
1075
1076    // Return base (empty stack)
1077    base
1078}
1079
1080/// Print a stack value in SON (Seq Object Notation) format
1081///
1082/// # Safety Requirements
1083/// The StackValue must be valid and not previously dropped. For strings,
1084/// the pointer (slot1) must point to valid, readable memory of length slot2.
1085/// This is guaranteed when called from stack.dump on freshly computed values.
1086fn print_stack_value(sv: &StackValue) {
1087    use crate::son::{SonConfig, value_to_son};
1088
1089    // Safety: We must clone the StackValue before converting to Value because
1090    // stack_value_to_value takes ownership of heap-allocated data (via Box::from_raw
1091    // for maps, Arc::from_raw for variants, etc.). Without cloning, the Value would
1092    // be dropped after printing, freeing the memory, and then the later drop_stack_value
1093    // loop would double-free. clone_stack_value properly handles refcounting.
1094    let cloned = unsafe { clone_stack_value(sv) };
1095    let value = unsafe { stack_value_to_value(cloned) };
1096    let son = value_to_son(&value, &SonConfig::compact());
1097    print!("{}", son);
1098}
1099
/// Macro to create a test stack
///
/// Expands to a `*mut StackValue` pointing at a zero-initialized static
/// buffer of 256 entries. Each expansion site gets its own buffer.
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        // SAFETY: StackValue is a plain struct of u64 slots, so the
        // all-zeroes bit pattern is valid for every element.
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw borrow instead of `BUFFER.as_mut_ptr()`: auto-ref on a
        // `static mut` trips the `static_mut_refs` lint (a hard error in
        // edition 2024). `&raw mut` never creates an intermediate reference.
        unsafe { (&raw mut BUFFER).cast::<StackValue>() }
    }};
}