seq_runtime/stack.rs

//! Tagged Stack Implementation
//!
//! This module implements the stack using a contiguous array of 40-byte StackValue entries.
//! Each StackValue has the layout: { slot0: discriminant, slot1-4: payload }
//!
//! The Stack type is a pointer to the "current position" (where the next push goes).
//! - Push: store at *sp, return sp + 1
//! - Pop: read from *(sp - 1), return sp - 1
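//!
//! Pointer-discipline sketch (illustrative only; `alloc_stack`, `push`, and
//! `pop` are all defined below):
//!
//! ```ignore
//! let base = alloc_stack();
//! let sp = unsafe { push(base, Value::Int(1)) };
//! let (sp, v) = unsafe { pop(sp) };
//! assert!(matches!(v, Value::Int(1)));
//! assert_eq!(sp, base);
//! ```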

use crate::tagged_stack::StackValue;
use crate::value::Value;
use std::sync::Arc;

/// Stack: A pointer to the current position in a contiguous array of StackValue.
///
/// Points to where the next value would be pushed.
/// sp - 1 points to the top value, sp - 2 to the second, etc.
pub type Stack = *mut StackValue;

/// Returns the size of a StackValue in bytes (for stack depth calculations)
#[inline]
pub fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}

/// Discriminant values matching codegen
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

/// Convert a Value to a StackValue for pushing onto the tagged stack
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            // SeqString has: ptr, len, capacity, global
            // Store these in slots 1-4
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            // Symbol uses the same SeqString representation as String
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            // Arc<[Value]> is a fat (two-word) pointer and does not fit in a
            // single 8-byte slot, so box it and store the thin Box pointer.
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            // Store Arc<ChannelData> as raw pointer
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            // Store both Arc<WeaveChannelData> as raw pointers
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}

/// Convert a StackValue back to a Value
///
/// # Safety
/// The StackValue must contain valid data for its discriminant
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // Unbox the Arc<[Value]> that we boxed in value_to_stack_value
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}
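
// Ownership note (sketch): `value_to_stack_value` moves any heap payload into
// the StackValue, and `stack_value_to_value` moves it back out. Each
// StackValue must therefore be consumed exactly once, either by
// `stack_value_to_value` or by `drop_stack_value`; consuming it twice
// double-frees, and never consuming it leaks. For example:
//
//     let sv = value_to_stack_value(Value::Int(42));
//     let v = unsafe { stack_value_to_value(sv) };
//     assert!(matches!(v, Value::Int(42)));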

/// Clone a StackValue from LLVM IR - reads from src pointer, writes to dst pointer.
/// This is the FFI-callable version for inline codegen that avoids ABI issues
/// with passing large structs by value.
///
/// # Safety
/// Both src and dst pointers must be valid and properly aligned StackValue pointers.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

/// Clone a StackValue, handling reference counting for heap types.
///
/// # Cloning Strategy by Type
/// - **Int, Float, Bool, Quotation**: Bitwise copy (no heap allocation)
/// - **String**: Deep copy to a new global (refcounted) string. This is necessary
///   because the source may be an arena-allocated string that would become invalid
///   when the arena resets. Global strings are heap-allocated with Arc refcounting.
/// - **Symbol**: Interned symbols (capacity=0, global) share the static pointer;
///   all other symbols are deep-copied like strings.
/// - **Variant**: Arc refcount increment (O(1), shares underlying data)
/// - **Map**: Deep clone of the HashMap and all contained values
/// - **Closure**: New Box around an Arc refcount increment; the environment
///   slice itself is shared, not deep-cloned
/// - **Channel**: Arc refcount increment (O(1), shares underlying sender/receiver)
/// - **WeaveCtx**: Arc refcount increment for both channels (O(1))
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                // Deep copy: arena strings may become invalid, so always create a global string
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                // Read the string content without taking ownership
                let slice = std::slice::from_raw_parts(ptr, len);
                // Validate UTF-8 in debug builds, skip in release for performance
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                // Clone to a new global string
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                // Deep clone the map
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                // The env is stored as Box<Arc<[Value]>>
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                // Box the cloned Arc
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                // Arc refcount increment - O(1) clone
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                // Arc refcount increment for both channels - O(1) clone
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                // Fast path (Issue #166): interned symbols can share the static pointer
                // Interned symbols have capacity=0 and global=true
                if capacity == 0 && is_global {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    // Safety: Interned symbols are guaranteed to point to valid static data.
                    // The compiler generates these in get_symbol_global(), which always
                    // creates valid string globals. A null pointer here indicates a compiler bug.
                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    // Create a new SeqString that shares the static data.
                    // This properly maintains ownership semantics even though
                    // Drop is a no-op for capacity=0 symbols.
                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    // Deep copy: arena symbols may become invalid
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}

/// Drop a StackValue, decrementing refcounts for heap types
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // No heap allocation, nothing to drop
            }
            DISC_STRING => {
                // Reconstruct SeqString and let it drop
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
                // SeqString::drop will free global strings, ignore arena strings
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Unbox and drop the Arc<[Value]>
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                // Reconstruct SeqString and let it drop (same as String)
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}
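
// Balance rule (sketch): every `clone_stack_value` must eventually be paired
// with either a `drop_stack_value` or a consuming `stack_value_to_value`,
// just as Arc clones are paired with drops. For example:
//
//     let sv = value_to_stack_value(Value::Int(1));
//     let copy = unsafe { clone_stack_value(&sv) };
//     unsafe { drop_stack_value(copy) };
//     unsafe { drop_stack_value(sv) };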

// ============================================================================
// Core Stack Operations
// ============================================================================

/// Push a value onto the stack
///
/// Stores the value at the current stack pointer and returns sp + 1.
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

/// Push a StackValue directly onto the stack
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

/// Pop a value from the stack
///
/// Returns (new_sp, value) where new_sp = sp - 1.
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

/// Pop a StackValue directly from the stack
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

/// Pop two values from the stack (for binary operations)
///
/// Returns (new_sp, a, b) where a was below b on the stack.
/// Stack effect: ( a b -- ), yielding a and b.
///
/// # Safety
/// Stack must have at least two values.
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}
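
// Sketch of a binary primitive built on `pop_two` (hypothetical `add_ints`
// helper, not part of this module; real primitives would report type errors
// through the runtime error machinery instead of panicking):
//
//     unsafe fn add_ints(stack: Stack) -> Stack {
//         let (sp, a, b) = pop_two(stack, "add");
//         match (a, b) {
//             (Value::Int(x), Value::Int(y)) => push(sp, Value::Int(x + y)),
//             _ => panic!("add: expected two Ints"),
//         }
//     }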

/// Pop three values from the stack (for ternary operations)
///
/// # Safety
/// Stack must have at least three values.
#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

/// Peek at the top value without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let sv = *stack.sub(1);
        // Don't consume - need to clone for heap types
        stack_value_to_value(clone_stack_value(&sv))
    }
}

/// Peek at the raw StackValue without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

/// Check if stack is empty (at base pointer)
/// Note: With tagged stack, we need to compare against base, not null
#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // For now, assume stack is never truly empty in valid programs
    // The caller should track the base pointer if needed
    false
}
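
// A genuine emptiness check compares sp against the strand's base pointer
// (see `get_stack_base` below), e.g.:
//
//     let empty = sp == get_stack_base();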

// ============================================================================
// FFI Stack Operations
// ============================================================================

/// Duplicate the top value on the stack: ( a -- a a )
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

/// Drop the top value from the stack: ( a -- )
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

/// Alias for drop to avoid LLVM keyword conflicts
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

/// Push an arbitrary Value onto the stack (for LLVM codegen)
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

/// Swap the top two values: ( a b -- b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

/// Copy the second value to the top: ( a b -- a b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

/// Rotate the top three values: ( a b c -- b c a )
///
/// # Safety
/// Stack must have at least three values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}

/// Remove the second value: ( a b -- b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

/// Copy top value below second value: ( a b -- b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

/// Duplicate top two values: ( a b -- a b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

/// Drop top three values: ( a b c -- )
///
/// # Safety
/// Stack must have at least three values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
    unsafe {
        let (sp, sv_c) = pop_sv(stack);
        let (sp, sv_b) = pop_sv(sp);
        let (sp, sv_a) = pop_sv(sp);
        drop_stack_value(sv_c);
        drop_stack_value(sv_b);
        drop_stack_value(sv_a);
        sp
    }
}

/// Pick: Copy the nth value to the top
/// ( ... xn ... x1 x0 n -- ... xn ... x1 x0 xn )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // Get the value at depth n (0 = top after popping n)
        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}

/// Roll: Rotate n+1 items, bringing the item at depth n to the top
/// ( x_n x_(n-1) ... x_1 x_0 n -- x_(n-1) ... x_1 x_0 x_n )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // General case: save item at depth n, shift others, put saved at top
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift items down: memmove from src+1 to src, n items
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        // Put saved item at top (sp - 1)
        *sp.sub(1) = saved;

        sp
    }
}

/// Clone a stack segment
///
/// Clones `count` StackValues from src to dst, handling refcounts.
/// Note the conventions: src follows the Stack convention (it points just past
/// the topmost value of the segment), while dst points at the first slot to
/// write.
///
/// # Safety
/// Both src and dst must be valid stack pointers with sufficient space for count values.
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for i in 0..count {
            let sv = *src.sub(count - i);
            let cloned = clone_stack_value(&sv);
            *dst.add(i) = cloned;
        }
    }
}

// ============================================================================
// Coroutine-Local Stack Base Tracking (for spawn)
// ============================================================================
//
// IMPORTANT: We use May's coroutine_local! instead of thread_local! because
// May coroutines can migrate between OS threads. Using thread_local would cause
// STACK_BASE to be lost when a coroutine is moved to a different thread.

use std::cell::Cell;

// Use coroutine-local storage that moves with the coroutine
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

/// Set the current strand's stack base (called at strand entry)
///
/// # Safety
/// Base pointer must be a valid stack pointer for the current strand.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

/// Get the current strand's stack base
#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

/// Clone the current stack for spawning a child strand
///
/// Allocates a new stack buffer and copies all values from the current stack.
/// Returns the new stack pointer, which points just past the topmost cloned
/// value (per the Stack convention).
///
/// # Safety
/// - Current stack must have a valid base set via set_stack_base
/// - sp must point to a valid position within the current stack
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

/// Clone the current stack for spawning, returning both base and sp.
///
/// This is used by spawn to create a copy of the parent's stack for the child strand.
/// Returns (new_sp, new_base) so the spawn mechanism can set STACK_BASE for the child.
///
/// # Safety
/// Current stack must have a valid base set via set_stack_base and sp must point to a valid position.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    // Calculate depth (number of values on stack)
    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        // Empty stack - still need to allocate a buffer
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        std::mem::forget(new_stack); // Don't drop - caller owns memory
        return (new_base, new_base);
    }

    // Allocate new stack with capacity for at least the current depth
    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack); // Don't drop - caller owns memory

    // Clone all values from base to sp
    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    // Return both sp and base
    unsafe { (new_base.add(depth), new_base) }
}
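
// Spawn-side sketch (illustrative wiring, not the actual spawn code): the
// child strand receives its own copy of the parent's stack and must register
// its own base before it starts running:
//
//     let (child_sp, child_base) = unsafe { clone_stack_with_base(parent_sp) };
//     // ...inside the child coroutine:
//     unsafe { patch_seq_set_stack_base(child_base) };
//     // execution then continues from child_sp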

// ============================================================================
// Short Aliases for Internal/Test Use
// ============================================================================

pub use patch_seq_2dup as two_dup;
pub use patch_seq_3drop as three_drop;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

// ============================================================================
// Stack Allocation Helpers
// ============================================================================

/// Allocate a new stack with default capacity.
/// Returns a pointer to the base of the stack (where the first push goes).
///
/// # Note
/// The returned buffer is not tracked anywhere and is intentionally leaked:
/// nothing ever frees it. This is acceptable for its current uses, namely
/// temporary stacks in quotation calls and tests.
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    std::mem::forget(stack); // Don't drop - caller owns memory
    base
}

/// Allocate a new test stack and set it as the stack base
/// This is used in tests that need clone_stack to work
#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

/// Dump all values on the stack (for REPL debugging)
///
/// Prints all stack values in a readable format, then clears the stack.
/// Returns the stack base (empty stack).
///
/// # Safety
/// - Stack base must have been set via set_stack_base
/// - sp must be a valid stack pointer
/// - All stack values between base and sp must be valid
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("stack:");
    } else {
        use std::io::Write;
        print!("stack: ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        // Flush stdout to ensure output is visible immediately
        // This prevents partial output if the program terminates unexpectedly
        let _ = std::io::stdout().flush();

        // Drop all heap-allocated values to prevent memory leaks
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    // Return base (empty stack)
    base
}

/// Print a stack value in SON (Seq Object Notation) format
///
/// # Safety Requirements
/// The StackValue must be valid and not previously dropped. For strings,
/// the pointer (slot1) must point to valid, readable memory of length slot2.
/// This is guaranteed when called from stack.dump on freshly computed values.
fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    // Safety: We must clone the StackValue before converting to Value because
    // stack_value_to_value takes ownership of heap-allocated data (via Box::from_raw
    // for maps, Arc::from_raw for variants, etc.). Without cloning, the Value would
    // be dropped after printing, freeing the memory, and then the later drop_stack_value
    // loop would double-free. clone_stack_value properly handles refcounting.
    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

/// Macro to create a test stack
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer directly; creating a reference to a `static mut`
        // (as `BUFFER.as_mut_ptr()` would) is rejected in edition 2024.
        unsafe { (&raw mut BUFFER) as *mut StackValue }
    }};
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // some value
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // only one value
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
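
    // Added examples (not part of the original suite): exercise the
    // round-trip conversion and the documented pick/roll success paths using
    // only APIs defined in this module.

    #[test]
    fn test_push_pop_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(42));
            let (_stack, v) = pop(stack);
            assert!(matches!(v, Value::Int(42)));
        }
    }

    #[test]
    fn test_pick_zero_duplicates_top() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(7));
            let stack = push(stack, Value::Int(0)); // n = 0: copy the top value

            let stack = patch_seq_pick_op(stack);

            assert!(!crate::error::has_runtime_error());
            let (stack, top) = pop(stack);
            let (_stack, below) = pop(stack);
            assert!(matches!(top, Value::Int(7)));
            assert!(matches!(below, Value::Int(7)));
        }
    }

    #[test]
    fn test_roll_brings_deep_item_to_top() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1));
            let stack = push(stack, Value::Int(2));
            let stack = push(stack, Value::Int(3));
            let stack = push(stack, Value::Int(4));
            let stack = push(stack, Value::Int(3)); // n = 3: ( 1 2 3 4 -- 2 3 4 1 )

            let stack = patch_seq_roll(stack);

            assert!(!crate::error::has_runtime_error());
            let (stack, a) = pop(stack);
            let (stack, b) = pop(stack);
            let (stack, c) = pop(stack);
            let (_stack, d) = pop(stack);
            assert!(matches!(a, Value::Int(1)));
            assert!(matches!(b, Value::Int(4)));
            assert!(matches!(c, Value::Int(3)));
            assert!(matches!(d, Value::Int(2)));
        }
    }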
}