seq_core/stack.rs

//! Tagged Stack Implementation
//!
//! This module implements the stack using a contiguous array of 40-byte StackValue entries.
//! Each StackValue has the layout: { slot0: discriminant, slot1-4: payload }
//!
//! The Stack type is a pointer to the "current position" (where next push goes).
//! - Push: store at *sp, return sp + 1
//! - Pop: return sp - 1, read from *(sp - 1)

use crate::tagged_stack::StackValue;
use crate::value::Value;
use std::sync::Arc;

/// Stack: A pointer to the current position in a contiguous array of StackValue.
///
/// Points to where the next value would be pushed.
/// sp - 1 points to the top value, sp - 2 to second, etc.
pub type Stack = *mut StackValue;

/// Returns the size of a StackValue in bytes (for stack depth calculations)
#[inline]
pub fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}

/// Discriminant values matching codegen
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

/// Convert a Value to a StackValue for pushing onto the tagged stack
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            // SeqString has: ptr, len, capacity, global
            // Store these in slots 1-4
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            // Symbol uses the same SeqString representation as String
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            // Arc<[Value]> is a fat pointer - use Box to store it
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            // Store Arc<ChannelData> as raw pointer
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            // Store both Arc<WeaveChannelData> as raw pointers
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}

/// Convert a StackValue back to a Value
///
/// # Safety
/// The StackValue must contain valid data for its discriminant
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // Unbox the Arc<[Value]> that we boxed in value_to_stack_value
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}
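
// Illustrative sketch: a round-trip sanity check for the conversion pair
// above. Scalars carry no heap payload, so encode/decode is pure bit
// manipulation and safe to exercise directly. The module and test names
// here are illustrative, not part of the runtime ABI.
#[cfg(test)]
mod conversion_roundtrip_example {
    use super::*;

    #[test]
    fn scalars_survive_a_round_trip() {
        let sv = value_to_stack_value(Value::Int(-7));
        assert_eq!(sv.slot0, DISC_INT);
        // Safety: sv was just produced by value_to_stack_value, so the
        // discriminant and payload are consistent.
        match unsafe { stack_value_to_value(sv) } {
            Value::Int(i) => assert_eq!(i, -7),
            _ => panic!("expected Int"),
        }

        let sv = value_to_stack_value(Value::Float(1.5));
        assert_eq!(sv.slot1, 1.5f64.to_bits());
        match unsafe { stack_value_to_value(sv) } {
            Value::Float(f) => assert_eq!(f, 1.5),
            _ => panic!("expected Float"),
        }
    }
}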

/// Clone a StackValue from LLVM IR - reads from src pointer, writes to dst pointer.
/// This is the FFI-callable version for inline codegen that avoids ABI issues
/// with passing large structs by value.
///
/// # Safety
/// Both src and dst pointers must be valid and properly aligned StackValue pointers.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

/// Clone a StackValue, handling reference counting for heap types.
///
/// # Cloning Strategy by Type
/// - **Int, Float, Bool, Quotation**: Bitwise copy (no heap allocation)
/// - **String**: Deep copy to a new global (refcounted) string. This is necessary
///   because the source may be an arena-allocated string that would become invalid
///   when the arena resets. Global strings are heap-allocated with Arc refcounting.
/// - **Variant**: Arc refcount increment (O(1), shares underlying data)
/// - **Map**: Deep clone of the HashMap and all contained values
/// - **Closure**: Deep clone of the Arc<[Value]> environment
/// - **Channel**: Arc refcount increment (O(1), shares underlying sender/receiver)
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                // Deep copy: arena strings may become invalid, so always create a global string
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                // Read the string content without taking ownership
                let slice = std::slice::from_raw_parts(ptr, len);
                // Validate UTF-8 in debug builds, skip in release for performance
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                // Clone to a new global string
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                // Deep clone the map
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                // The env is stored as Box<Arc<[Value]>>
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                // Box the cloned Arc
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                // Arc refcount increment - O(1) clone
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                // Arc refcount increment for both channels - O(1) clone
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                // Fast path (Issue #166): interned symbols can share the static pointer
                // Interned symbols have capacity=0 and global=true
                if capacity == 0 && is_global {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    // Safety: Interned symbols are guaranteed to point to valid static data.
                    // The compiler generates these in get_symbol_global(), which always
                    // creates valid string globals. A null pointer here indicates a compiler bug.
                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    // Create a new SeqString that shares the static data.
                    // This properly maintains ownership semantics even though
                    // Drop is a no-op for capacity=0 symbols.
                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    // Deep copy: arena symbols may become invalid
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}
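
// Illustrative sketch of the cheapest clone path: for scalar discriminants,
// clone_stack_value is a plain bitwise copy, so the cloned slots match the
// source exactly. Heap-carrying variants (String, Map, ...) are deliberately
// avoided here since their clones involve refcounts or deep copies.
#[cfg(test)]
mod clone_scalar_example {
    use super::*;

    #[test]
    fn scalar_clone_is_bitwise() {
        let sv = value_to_stack_value(Value::Int(42));
        // Safety: sv holds a valid DISC_INT payload built just above.
        let cloned = unsafe { clone_stack_value(&sv) };
        assert_eq!(cloned.slot0, DISC_INT);
        assert_eq!(cloned.slot1, 42);
    }
}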

/// Drop a StackValue, decrementing refcounts for heap types
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // No heap allocation, nothing to drop
            }
            DISC_STRING => {
                // Reconstruct SeqString and let it drop
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
                // SeqString::drop will free global strings, ignore arena strings
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Unbox and drop the Arc<[Value]>
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                // Reconstruct SeqString and let it drop (same as String)
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}

// ============================================================================
// Core Stack Operations
// ============================================================================

/// Push a value onto the stack
///
/// Stores the value at the current stack pointer and returns sp + 1.
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

/// Push a StackValue directly onto the stack
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

/// Pop a value from the stack
///
/// Returns (new_sp, value) where new_sp = sp - 1.
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

/// Pop a StackValue directly from the stack
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

/// Pop two values from the stack (for binary operations)
///
/// Returns (new_sp, a, b) where a was below b on the stack.
/// Stack effect: ( a b -- ) returns (a, b)
///
/// # Safety
/// Stack must have at least two values.
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}
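
// Illustrative sketch of the LIFO discipline and the (a, b) ordering that
// pop_two promises: b is popped first (it was on top), then a. Uses
// alloc_stack() from this module; scalars only, so no refcount bookkeeping
// is needed.
#[cfg(test)]
mod push_pop_order_example {
    use super::*;

    #[test]
    fn pop_two_returns_values_in_push_order() {
        unsafe {
            let sp = alloc_stack();
            let sp = push(sp, Value::Int(1)); // a
            let sp = push(sp, Value::Int(2)); // b
            let (_sp, a, b) = pop_two(sp, "example");
            assert!(matches!(a, Value::Int(1)));
            assert!(matches!(b, Value::Int(2)));
        }
    }
}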

/// Pop three values from the stack (for ternary operations)
///
/// # Safety
/// Stack must have at least three values.
#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

/// Peek at the top value without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let sv = *stack.sub(1);
        // Don't consume - need to clone for heap types
        stack_value_to_value(clone_stack_value(&sv))
    }
}

/// Peek at the raw StackValue without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

/// Check if stack is empty (at base pointer)
/// Note: With tagged stack, we need to compare against base, not null
#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // For now, assume stack is never truly empty in valid programs
    // The caller should track base pointer if needed
    false
}

// ============================================================================
// FFI Stack Operations
// ============================================================================

/// Duplicate the top value on the stack: ( a -- a a )
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

/// Drop the top value from the stack: ( a -- )
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

/// Alias for drop to avoid LLVM keyword conflicts
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

/// Push an arbitrary Value onto the stack (for LLVM codegen)
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

/// Swap the top two values: ( a b -- b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

/// Copy the second value to the top: ( a b -- a b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

/// Rotate the top three values: ( a b c -- b c a )
///
/// # Safety
/// Stack must have at least three values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}
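
// Illustrative sketch exercising the Forth-style shuffles above through
// their FFI entry points. With three scalars on the stack, rot yields
// ( a b c -- b c a ) and swap then flips the remaining pair.
#[cfg(test)]
mod shuffle_op_example {
    use super::*;

    #[test]
    fn rot_and_swap_behave_as_documented() {
        unsafe {
            let sp = alloc_stack();
            let sp = push(sp, Value::Int(1)); // a
            let sp = push(sp, Value::Int(2)); // b
            let sp = push(sp, Value::Int(3)); // c
            let sp = patch_seq_rot(sp); // ( a b c -- b c a )
            let (sp, top) = pop(sp);
            assert!(matches!(top, Value::Int(1)));
            let sp = patch_seq_swap(sp); // ( b c -- c b )
            let (_sp, top) = pop(sp);
            assert!(matches!(top, Value::Int(2)));
        }
    }
}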

/// Remove the second value: ( a b -- b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

/// Copy top value below second value: ( a b -- b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

/// Duplicate top two values: ( a b -- a b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

/// Pick: Copy the nth value to the top
/// ( ... xn ... x1 x0 n -- ... xn ... x1 x0 xn )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // Get the value at depth n (0 = top after popping n)
        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}
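
// Illustrative happy-path sketch for pick. alloc_test_stack() installs the
// stack base that the depth check above consults; `100 200 1 pick` then
// copies the value at depth 1 (100) to the top.
#[cfg(test)]
mod pick_happy_path_example {
    use super::*;

    #[test]
    fn pick_copies_the_nth_value_to_the_top() {
        unsafe {
            crate::error::clear_runtime_error();
            let sp = alloc_test_stack();
            let sp = push(sp, Value::Int(100));
            let sp = push(sp, Value::Int(200));
            let sp = push(sp, Value::Int(1)); // n
            let sp = patch_seq_pick_op(sp);
            assert!(!crate::error::has_runtime_error());
            let (_sp, top) = pop(sp);
            assert!(matches!(top, Value::Int(100)));
        }
    }
}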

/// Roll: Rotate n+1 items, bringing the item at depth n to the top
/// ( x_n x_(n-1) ... x_1 x_0 n -- x_(n-1) ... x_1 x_0 x_n )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // General case: save item at depth n, shift others, put saved at top
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift items down: memmove from src+1 to src, n items
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        // Put saved item at top (sp - 1)
        *sp.sub(1) = saved;

        sp
    }
}
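
// Illustrative happy-path sketch for the general roll path (n >= 3), which
// memmoves n values down one slot and places the saved value on top:
// `1 2 3 4 3 roll` leaves `2 3 4 1`.
#[cfg(test)]
mod roll_happy_path_example {
    use super::*;

    #[test]
    fn roll_brings_the_deep_value_to_the_top() {
        unsafe {
            crate::error::clear_runtime_error();
            let mut sp = alloc_test_stack();
            for i in 1..=4 {
                sp = push(sp, Value::Int(i));
            }
            sp = push(sp, Value::Int(3)); // n
            sp = patch_seq_roll(sp);
            assert!(!crate::error::has_runtime_error());
            let (_sp, top) = pop(sp);
            assert!(matches!(top, Value::Int(1)));
        }
    }
}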

/// Clone a stack segment
///
/// Clones `count` StackValues from src to dst, handling refcounts.
///
/// # Safety
/// src must point just past the top `count` values (stack-pointer convention),
/// and dst must be valid for writing `count` values upward from dst[0].
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for i in 0..count {
            let sv = *src.sub(count - i);
            let cloned = clone_stack_value(&sv);
            *dst.add(i) = cloned;
        }
    }
}

// ============================================================================
// Coroutine-Local Stack Base Tracking (for spawn)
// ============================================================================
//
// IMPORTANT: We use May's coroutine_local! instead of thread_local! because
// May coroutines can migrate between OS threads. Using thread_local would cause
// STACK_BASE to be lost when a coroutine is moved to a different thread.

use std::cell::Cell;

// Use coroutine-local storage that moves with the coroutine
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

/// Set the current strand's stack base (called at strand entry)
///
/// # Safety
/// Base pointer must be a valid stack pointer for the current strand.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

/// Get the current strand's stack base
#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

/// Clone the current stack for spawning a child strand
///
/// Allocates a new stack buffer and copies all values from the current stack.
/// Returns the new stack pointer (one past the top value, per the Stack convention).
///
/// # Safety
/// - Current stack must have a valid base set via set_stack_base
/// - sp must point to a valid position within the current stack
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

/// Clone the current stack for spawning, returning both base and sp.
///
/// This is used by spawn to create a copy of the parent's stack for the child strand.
/// Returns (new_sp, new_base) so the spawn mechanism can set STACK_BASE for the child.
///
/// # Safety
/// Current stack must have a valid base set via set_stack_base and sp must point to a valid position.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};

    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    // Calculate depth (number of values on stack)
    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        // Empty stack - still need to allocate a buffer
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        std::mem::forget(new_stack); // Don't drop - caller owns memory
        return (new_base, new_base);
    }

    // Allocate new stack with capacity for at least the current depth
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack); // Don't drop - caller owns memory

    // Clone all values from base to sp
    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    // Return both sp and base
    unsafe { (new_base.add(depth), new_base) }
}
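
// Illustrative sketch of the spawn-time stack clone. alloc_test_stack()
// installs the base pointer that clone_stack_with_base reads; the clone owns
// independent copies, so popping from the child does not disturb the parent.
#[cfg(test)]
mod clone_stack_example {
    use super::*;

    #[test]
    fn clone_stack_copies_values_for_the_child() {
        unsafe {
            let sp = alloc_test_stack();
            let sp = push(sp, Value::Int(7));
            let child_sp = clone_stack(sp);
            let (_child, v) = pop(child_sp);
            assert!(matches!(v, Value::Int(7)));
            // The parent still holds its own copy.
            let (_parent, v) = pop(sp);
            assert!(matches!(v, Value::Int(7)));
        }
    }
}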

// ============================================================================
// Short Aliases for Internal/Test Use
// ============================================================================

pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

// ============================================================================
// Stack Allocation Helpers
// ============================================================================

/// Allocate a new stack with default capacity.
/// Returns a pointer to the base of the stack (where the first push goes).
///
/// # Note
/// The returned stack is allocated but not tracked: nothing frees it, so the
/// memory is intentionally leaked. This is acceptable for the temporary stacks
/// used in quotation calls and tests.
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    std::mem::forget(stack); // Don't drop - caller owns memory
    base
}

/// Allocate a new test stack and set it as the stack base
/// This is used in tests that need clone_stack to work
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

/// Dump all values on the stack (for REPL debugging)
///
/// Prints all stack values in a readable format, then clears the stack.
/// Returns the stack base (empty stack).
///
/// # Safety
/// - Stack base must have been set via set_stack_base
/// - sp must be a valid stack pointer
/// - All stack values between base and sp must be valid
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("»");
    } else {
        use std::io::Write;
        print!("» ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        // Flush stdout to ensure output is visible immediately
        // This prevents partial output if the program terminates unexpectedly
        let _ = std::io::stdout().flush();

        // Drop all heap-allocated values to prevent memory leaks
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    // Return base (empty stack)
    base
}

/// Print a stack value in SON (Seq Object Notation) format
///
/// # Safety Requirements
/// The StackValue must be valid and not previously dropped. For strings,
/// the pointer (slot1) must point to valid, readable memory of length slot2.
/// This is guaranteed when called from stack.dump on freshly computed values.
fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    // Safety: We must clone the StackValue before converting to Value because
    // stack_value_to_value takes ownership of heap-allocated data (via Box::from_raw
    // for maps, Arc::from_raw for variants, etc.). Without cloning, the Value would
    // be dropped after printing, freeing the memory, and then the later drop_stack_value
    // loop would double-free. clone_stack_value properly handles refcounting.
    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

/// Macro to create a test stack
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer without creating a reference to the static mut
        // (references to `static mut` are rejected in edition 2024).
        unsafe { std::ptr::addr_of_mut!(BUFFER) as *mut StackValue }
    }};
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // some value
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // only one value
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
}