// seq_core/stack.rs

//! Tagged Stack Implementation
//!
//! This module implements the stack using a contiguous array of 40-byte StackValue entries.
//! Each StackValue has the layout: { slot0: discriminant, slot1-4: payload }
//!
//! The Stack type is a pointer to the "current position" (where the next push goes).
//! - Push: store at *sp, return sp + 1
//! - Pop: return sp - 1, read from *(sp - 1)
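//!
//! For example (illustrative sketch; a real stack comes from `alloc_stack`, and
//! all pointer arithmetic is unsafe):
//!
//! ```ignore
//! let sp = alloc_stack();                       // sp == base, stack empty
//! let sp = unsafe { push(sp, Value::Int(7)) };  // stores at *sp, returns sp + 1
//! let (sp, v) = unsafe { pop(sp) };             // returns sp - 1 and the value
//! assert!(matches!(v, Value::Int(7)));          // sp is back at the base
//! ```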

use crate::tagged_stack::StackValue;
use crate::value::Value;
use std::sync::Arc;

/// Stack: A pointer to the current position in a contiguous array of StackValue.
///
/// Points to where the next value would be pushed.
/// sp - 1 points to the top value, sp - 2 to second, etc.
pub type Stack = *mut StackValue;

/// Returns the size of a StackValue in bytes (for stack depth calculations)
#[inline]
pub fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}

/// Discriminant values matching codegen
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

/// Convert a Value to a StackValue for pushing onto the tagged stack
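///
/// For example (illustrative): scalar payloads occupy `slot1` and the remaining
/// slots are zeroed.
///
/// ```ignore
/// let sv = value_to_stack_value(Value::Int(42));
/// assert_eq!(sv.slot0, DISC_INT);
/// assert_eq!(sv.slot1, 42);
/// assert_eq!(sv.slot2, 0);
/// ```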
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            // SeqString has: ptr, len, capacity, global
            // Store these in slots 1-4
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            // Symbol uses the same SeqString representation as String
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            // Arc<[Value]> is a fat pointer - use Box to store it
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            // Store Arc<ChannelData> as raw pointer
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            // Store both Arc<WeaveChannelData> as raw pointers
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}

/// Convert a StackValue back to a Value
///
/// # Safety
/// The StackValue must contain valid data for its discriminant
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // Unbox the Arc<[Value]> that we boxed in value_to_stack_value
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}

/// Clone a StackValue from LLVM IR - reads from src pointer, writes to dst pointer.
/// This is the FFI-callable version for inline codegen that avoids ABI issues
/// with passing large structs by value.
///
/// # Safety
/// Both src and dst pointers must be valid and properly aligned StackValue pointers.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

/// Clone a StackValue, handling reference counting for heap types.
///
/// # Cloning Strategy by Type
/// - **Int, Float, Bool, Quotation**: Bitwise copy (no heap allocation)
/// - **String**: Deep copy to a new global (refcounted) string. This is necessary
///   because the source may be an arena-allocated string that would become invalid
///   when the arena resets. Global strings are heap-allocated with Arc refcounting.
/// - **Symbol**: Interned symbols (capacity=0, global=true) share their static
///   pointer; all other symbols are deep-copied like strings
/// - **Variant**: Arc refcount increment (O(1), shares underlying data)
/// - **Map**: Deep clone of the HashMap and all contained values
/// - **Closure**: Arc refcount increment on the shared env; only the Box holding
///   the Arc<[Value]> is reallocated
/// - **Channel**: Arc refcount increment (O(1), shares underlying sender/receiver)
/// - **WeaveCtx**: Arc refcount increment for both channels (O(1))
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
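///
/// For example (illustrative; `make_variant()` is a hypothetical constructor
/// standing in for any code that produces a `Value::Variant`):
///
/// ```ignore
/// let sv = value_to_stack_value(Value::Variant(make_variant()));
/// let copy = unsafe { clone_stack_value(&sv) };  // Arc refcount: 1 -> 2
/// unsafe { drop_stack_value(copy) };             // refcount back to 1
/// unsafe { drop_stack_value(sv) };               // payload freed
/// ```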
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                // Deep copy: arena strings may become invalid, so always create a global string
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                // Read the string content without taking ownership
                let slice = std::slice::from_raw_parts(ptr, len);
                // Validate UTF-8 in debug builds, skip in release for performance
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                // Clone to a new global string
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                // Deep clone the map
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                // The env is stored as Box<Arc<[Value]>>
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                // Box the cloned Arc
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                // Arc refcount increment - O(1) clone
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                // Arc refcount increment for both channels - O(1) clone
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                // Fast path (Issue #166): interned symbols can share the static pointer
                // Interned symbols have capacity=0 and global=true
                if capacity == 0 && is_global {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    // Safety: Interned symbols are guaranteed to point to valid static data.
                    // The compiler generates these in get_symbol_global(), which always
                    // creates valid string globals. A null pointer here indicates a compiler bug.
                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    // Create a new SeqString that shares the static data.
                    // This properly maintains ownership semantics even though
                    // Drop is a no-op for capacity=0 symbols.
                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    // Deep copy: arena symbols may become invalid
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}

/// Drop a StackValue, decrementing refcounts for heap types
///
/// # Safety
/// The StackValue must contain valid data for its discriminant.
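///
/// For example (illustrative): a String StackValue owns its heap buffer, so a
/// single drop frees the global string it was built from.
///
/// ```ignore
/// let s = crate::seqstring::global_string("hi".to_string());
/// let sv = value_to_stack_value(Value::String(s));
/// unsafe { drop_stack_value(sv) };  // reconstructs the SeqString and drops it
/// ```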
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // No heap allocation, nothing to drop
            }
            DISC_STRING => {
                // Reconstruct SeqString and let it drop
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
                // SeqString::drop will free global strings, ignore arena strings
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Unbox and drop the Arc<[Value]>
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                // Reconstruct SeqString and let it drop (same as String)
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}

// ============================================================================
// Core Stack Operations
// ============================================================================

/// Push a value onto the stack
///
/// Stores the value at the current stack pointer and returns sp + 1.
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

/// Push a StackValue directly onto the stack
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

/// Pop a value from the stack
///
/// Returns (new_sp, value) where new_sp = sp - 1.
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

/// Pop a StackValue directly from the stack
///
/// # Safety
/// Stack must not be at base (must have at least one value).
#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

/// Pop two values from the stack (for binary operations)
///
/// Returns (new_sp, a, b) where a was below b on stack.
/// Stack effect: ( a b -- ), returned as the tuple (a, b).
///
/// # Safety
/// Stack must have at least two values.
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}

/// Pop three values from the stack (for ternary operations)
///
/// # Safety
/// Stack must have at least three values.
#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

/// Peek at the top value without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let sv = *stack.sub(1);
        // Don't consume - need to clone for heap types
        stack_value_to_value(clone_stack_value(&sv))
    }
}

/// Peek at the raw StackValue without removing it
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

// ============================================================================
// FFI Stack Operations
// ============================================================================

/// Duplicate the top value on the stack: ( a -- a a )
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

/// Drop the top value from the stack: ( a -- )
///
/// # Safety
/// Stack must have at least one value.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

/// Alias for drop to avoid LLVM keyword conflicts
///
/// # Safety
/// Stack must have at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

/// Push an arbitrary Value onto the stack (for LLVM codegen)
///
/// # Safety
/// Stack pointer must be valid and have room for the value.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

/// Swap the top two values: ( a b -- b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

/// Copy the second value to the top: ( a b -- a b a )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

/// Rotate the top three values: ( a b c -- b c a )
///
/// # Safety
/// Stack must have at least three values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}

/// Remove the second value: ( a b -- b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

/// Copy top value below second value: ( a b -- b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

/// Duplicate top two values: ( a b -- a b a b )
///
/// # Safety
/// Stack must have at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

/// Pick: Copy the nth value to the top
/// ( ... xn ... x1 x0 n -- ... xn ... x1 x0 xn )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
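///
/// For example (illustrative, assuming `sp` already holds `10 20 30`):
///
/// ```ignore
/// let sp = unsafe { push(sp, Value::Int(1)) };  // index n = 1
/// let sp = unsafe { patch_seq_pick_op(sp) };    // ( 10 20 30 1 -- 10 20 30 20 )
/// ```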
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // Get the value at depth n (0 = top after popping n)
        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}

/// Roll: Rotate n+1 items, bringing the item at depth n to the top
/// ( x_n x_(n-1) ... x_1 x_0 n -- x_(n-1) ... x_1 x_0 x_n )
///
/// # Safety
/// Stack must have at least n+1 values (plus the index value).
///
/// # Errors
/// Sets runtime error if:
/// - The top value is not an Int
/// - n is negative
/// - n exceeds the current stack depth
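///
/// For example (illustrative, assuming `sp` already holds `1 2 3`):
///
/// ```ignore
/// let sp = unsafe { push(sp, Value::Int(2)) };  // n = 2
/// let sp = unsafe { patch_seq_roll(sp) };       // ( 1 2 3 2 -- 2 3 1 )
/// ```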
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        // Get n from top of stack
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                // Value already consumed by pop, return sp (index consumed)
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        // Bounds check: n must be non-negative
        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp; // Return stack with index consumed
        }
        let n = n_raw as usize;

        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        // Check stack depth to prevent out-of-bounds access
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp; // Return stack with index consumed
        }

        // General case: save item at depth n, shift others, put saved at top
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift items down: memmove from src+1 to src, n items
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        // Put saved item at top (sp - 1)
        *sp.sub(1) = saved;

        sp
    }
}

/// Clone a stack segment
///
/// Clones the top `count` StackValues below the `src` stack pointer into the
/// buffer at `dst`, handling refcounts. Note the asymmetry: `src` is a
/// stack-top pointer (values are read from `src - count` up to `src`), while
/// `dst` is a base pointer that is written in ascending order.
///
/// # Safety
/// Both src and dst must be valid stack pointers with sufficient space for count values.
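///
/// For example (illustrative; `src_sp` is a hypothetical stack-top pointer with
/// at least two values below it):
///
/// ```ignore
/// let dst = alloc_stack();
/// unsafe { clone_stack_segment(src_sp, dst, 2) };  // dst[0] = second, dst[1] = top
/// let dst_sp = unsafe { dst.add(2) };              // stack pointer for the copy
/// ```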
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for i in 0..count {
            let sv = *src.sub(count - i);
            let cloned = clone_stack_value(&sv);
            *dst.add(i) = cloned;
        }
    }
}

// ============================================================================
// Coroutine-Local Stack Base Tracking (for spawn)
// ============================================================================
//
// IMPORTANT: We use May's coroutine_local! instead of thread_local! because
// May coroutines can migrate between OS threads. Using thread_local would cause
// STACK_BASE to be lost when a coroutine is moved to a different thread.

use std::cell::Cell;

// Use coroutine-local storage that moves with the coroutine
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

/// Set the current strand's stack base (called at strand entry)
///
/// # Safety
/// Base pointer must be a valid stack pointer for the current strand.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

/// Get the current strand's stack base
#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

/// Clone the current stack for spawning a child strand
///
/// Allocates a new stack buffer and copies all values from the current stack.
/// Returns the new stack pointer, one past the last copied value (per the Stack
/// convention: it points to where the next push goes).
///
/// # Safety
/// - Current stack must have a valid base set via set_stack_base
/// - sp must point to a valid position within the current stack
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

/// Clone the current stack for spawning, returning both base and sp.
///
/// This is used by spawn to create a copy of the parent's stack for the child strand.
/// Returns (new_sp, new_base) so the spawn mechanism can set STACK_BASE for the child.
///
/// # Safety
/// Current stack must have a valid base set via set_stack_base and sp must point to a valid position.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    // Calculate depth (number of values on stack)
    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        // Empty stack - still need to allocate a buffer
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        std::mem::forget(new_stack); // Don't drop - caller owns memory
        return (new_base, new_base);
    }

    // Allocate new stack with capacity for at least the current depth
    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack); // Don't drop - caller owns memory

    // Clone all values from base to sp
    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    // Return both sp and base
    unsafe { (new_base.add(depth), new_base) }
}

// ============================================================================
// Short Aliases for Internal/Test Use
// ============================================================================

pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

// ============================================================================
// Stack Allocation Helpers
// ============================================================================

/// Allocate a new stack with default capacity.
/// Returns a pointer to the base of the stack (where first push goes).
///
/// # Note
/// The returned stack is allocated but not tracked; its backing memory is
/// never freed (intentionally leaked). This is used for temporary stacks in
/// quotation calls and tests.
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    std::mem::forget(stack); // Don't drop - caller owns memory
    base
}

/// Allocate a new test stack and set it as the stack base
/// This is used in tests that need clone_stack to work
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

/// Dump all values on the stack (for REPL debugging)
///
/// Prints all stack values in a readable format, then clears the stack.
/// Returns the stack base (empty stack).
///
/// # Safety
/// - Stack base must have been set via set_stack_base
/// - sp must be a valid stack pointer
/// - All stack values between base and sp must be valid
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("»");
    } else {
        use std::io::Write;
        print!("» ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        // Flush stdout to ensure output is visible immediately
        // This prevents partial output if the program terminates unexpectedly
        let _ = std::io::stdout().flush();

        // Drop all heap-allocated values to prevent memory leaks
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    // Return base (empty stack)
    base
}

/// Print a stack value in SON (Seq Object Notation) format
///
/// # Safety Requirements
/// The StackValue must be valid and not previously dropped. For strings,
/// the pointer (slot1) must point to valid, readable memory of length slot2.
/// This is guaranteed when called from stack.dump on freshly computed values.
fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    // Safety: We must clone the StackValue before converting to Value because
    // stack_value_to_value takes ownership of heap-allocated data (via Box::from_raw
    // for maps, Arc::from_raw for variants, etc.). Without cloning, the Value would
    // be dropped after printing, freeing the memory, and then the later drop_stack_value
    // loop would double-free. clone_stack_value properly handles refcounting.
    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

/// Macro to create a test stack
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        unsafe { BUFFER.as_mut_ptr() }
    }};
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // some value
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100)); // only one value
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1)); // negative index

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10)); // index way too large

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
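
    // Illustrative additions: these tests sketch the documented round-trip and
    // shuffle behavior using only APIs defined in this module.

    #[test]
    fn test_scalar_roundtrip() {
        unsafe {
            // Scalars survive value_to_stack_value / stack_value_to_value unchanged
            let sv = value_to_stack_value(Value::Int(42));
            assert_eq!(sv.slot0, DISC_INT);
            assert!(matches!(stack_value_to_value(sv), Value::Int(42)));

            let sv = value_to_stack_value(Value::Bool(true));
            assert!(matches!(stack_value_to_value(sv), Value::Bool(true)));

            let sv = value_to_stack_value(Value::Float(1.5));
            assert!(matches!(stack_value_to_value(sv), Value::Float(f) if f == 1.5));
        }
    }

    #[test]
    fn test_roll_two_matches_rot() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1)); // a
            let stack = push(stack, Value::Int(2)); // b
            let stack = push(stack, Value::Int(3)); // c
            let stack = push(stack, Value::Int(2)); // n = 2

            // roll with n == 2 dispatches to rot: ( a b c -- b c a )
            let stack = patch_seq_roll(stack);

            let (stack, v) = pop(stack);
            assert!(matches!(v, Value::Int(1))); // a is now on top
            let (stack, v) = pop(stack);
            assert!(matches!(v, Value::Int(3)));
            let (_stack, v) = pop(stack);
            assert!(matches!(v, Value::Int(2)));
        }
    }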
}