use crate::tagged_stack::StackValue;
use crate::value::Value;
use std::sync::Arc;

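/// Raw stack pointer used by the generated code. By convention the pointer
/// is one past the top of stack: `push` writes through `sp` and returns
/// `sp.add(1)`, while `pop` reads from `sp.sub(1)`.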
pub type Stack = *mut StackValue;

#[inline]
pub fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}

// Discriminant tags stored in `slot0`, identifying which `Value` variant
// the remaining slots encode.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

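/// Encodes an owned `Value` into the flat five-slot `StackValue`
/// representation, moving any heap ownership (string buffers, `Box`es,
/// `Arc`s) into the raw slots. The inverse is `stack_value_to_value`.
///
/// A minimal round-trip sketch (illustrative only; it assumes nothing beyond
/// the definitions in this file):
///
/// ```ignore
/// let sv = value_to_stack_value(Value::Int(42));
/// assert_eq!(sv.slot0, DISC_INT);
/// assert_eq!(sv.slot1, 42);
/// // Every encoded value must be decoded (or dropped) exactly once:
/// let v = unsafe { stack_value_to_value(sv) };
/// assert!(matches!(v, Value::Int(42)));
/// ```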
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        // Floats travel as raw IEEE-754 bits.
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        // Strings and symbols decompose into (ptr, len, capacity, global);
        // ownership of the buffer moves into the slots.
        Value::String(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}

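/// Decodes a `StackValue` back into an owned `Value`, reclaiming whatever
/// heap ownership the slots carry.
///
/// # Safety
/// `sv` must have been produced by `value_to_stack_value` (or an equivalent
/// encoder) and must not be decoded or dropped again afterwards; doing so
/// would double-free the underlying allocation.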
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}

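/// C-ABI entry point for generated code: deep-clones the `StackValue` at
/// `src` into `dst` without consuming the source.
///
/// # Safety
/// `src` must point to a valid, initialized `StackValue`, and `dst` must be
/// valid for a write of one `StackValue`.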
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

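/// Deep-clones a `StackValue` without consuming the original: trivially
/// copyable tags are bit-copied, `Arc`-backed values get a strong-count
/// bump, maps are deep-cloned, and strings/symbols are re-allocated through
/// `global_string` (except interned symbols, which share their backing
/// bytes).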
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            // Plain bit patterns; a copy is a clone.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                let slice = std::slice::from_raw_parts(ptr, len);
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                // Rebuild the Arc just long enough to bump the strong count,
                // then forget it so the original pointer stays owned.
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                // The environment is shared, not deep-copied: clone the Arc
                // behind the box into a fresh box.
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                if capacity == 0 && is_global {
                    // Fast path: capacity 0 with the global flag set marks an
                    // interned symbol whose backing bytes are shared, so the
                    // raw parts can be copied directly.
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}

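/// Releases whatever heap ownership a `StackValue` carries; the inverse of
/// `value_to_stack_value` when the decoded `Value` is not needed.
///
/// # Safety
/// `sv` must be a valid, owned encoding and must not be used again
/// afterwards.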
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // No heap ownership; nothing to release.
            }
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}

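/// Pushes a `Value` onto the stack and returns the new stack pointer.
///
/// A usage sketch (illustrative; `alloc_test_stack` is defined further down
/// in this file and registers the allocation as the coroutine's stack base):
///
/// ```ignore
/// let sp = alloc_test_stack();
/// let sp = unsafe { push(sp, Value::Int(7)) };
/// let (sp, v) = unsafe { pop(sp) };
/// assert!(matches!(v, Value::Int(7)));
/// ```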
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

/// Pushes an already-encoded `StackValue` and returns the new stack pointer.
#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

/// Pops the top value, decoding it back into an owned `Value`.
#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

/// Pops the top `StackValue` without decoding it.
#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

/// Pops two values; `b` was on top of the stack, `a` below it.
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}

/// Pops three values; `c` was on top of the stack, `a` deepest.
#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let sv = *stack.sub(1);
        // Clone before decoding: `stack_value_to_value` takes ownership, and
        // the slot must remain valid on the stack.
        stack_value_to_value(clone_stack_value(&sv))
    }
}

#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // Emptiness cannot be determined from the stack pointer alone (it would
    // require comparing against the coroutine's stack base), so this always
    // reports `false`.
    false
}

/// `dup` — `( a -- a a )`: deep-clones the top of stack and pushes the clone.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

/// Pops the top value and releases its heap ownership.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

/// `drop` — `( a -- )`: C-ABI wrapper around `drop_top`.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

/// `swap` — `( a b -- b a )`: exchanges the top two values in place.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

/// `over` — `( a b -- a b a )`: pushes a deep clone of the second value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

/// `rot` — `( a b c -- b c a )`: rotates the third value to the top.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}

/// `nip` — `( a b -- b )`: drops the second value, keeping the top.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

/// `tuck` — `( a b -- b a b )`: inserts a clone of the top below the second value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

/// `2dup` — `( a b -- a b a b )`: deep-clones the top two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

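/// `pick` — pops an integer index `n`, then pushes a deep clone of the value
/// `n` slots below the new top of stack, so `0 pick` behaves like `dup` and
/// `1 pick` like `over`. On a negative, non-integer, or out-of-range index a
/// runtime error is recorded and the remaining stack is left unchanged.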
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}

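/// `roll` — pops an integer index `n` and moves the value `n` slots deep to
/// the top, shifting the values above it down by one. `0 roll` is a no-op,
/// `1 roll` is `swap`, and `2 roll` is `rot`. Index errors are reported the
/// same way as for `pick`.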
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift the n values above the source down one slot, then place the
        // saved value on top. `ptr::copy` handles the overlapping ranges.
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        *sp.sub(1) = saved;

        sp
    }
}

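/// Deep-clones the `count` values directly below `src` into `dst[0..count]`,
/// preserving their order.
///
/// # Safety
/// `src` must have at least `count` initialized values below it, and `dst`
/// must be valid for `count` writes.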
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for i in 0..count {
            let sv = *src.sub(count - i);
            let cloned = clone_stack_value(&sv);
            *dst.add(i) = cloned;
        }
    }
}

use std::cell::Cell;

// Each coroutine records the base of its own stack so that depth checks,
// dumps, and whole-stack clones can find the bottom of the current stack.
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

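/// Deep-clones the current stack (from the coroutine-local base up to `sp`)
/// into a fresh allocation and returns the new stack pointer. The new buffer
/// is intentionally leaked; the caller assumes ownership of it.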
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

/// Like `clone_stack`, but also returns the base pointer of the new stack.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};

    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        // Leak the wrapper: ownership transfers to the caller as raw pointers.
        std::mem::forget(new_stack);
        return (new_base, new_base);
    }

    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack);

    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    unsafe { (new_base.add(depth), new_base) }
}

// Convenience re-exports of the stack-shuffle words under their short names.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

/// Allocates a fresh stack and returns its base pointer. The backing
/// `TaggedStack` is intentionally leaked; the caller owns the allocation.
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    std::mem::forget(stack);
    base
}

/// Allocates a stack and registers it as the current coroutine's stack base.
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("»");
    } else {
        use std::io::Write;
        print!("» ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        let _ = std::io::stdout().flush();

        // The dump consumes the stack: every value is dropped and the stack
        // pointer resets to the base.
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}

fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    // Clone before decoding so the caller's slot keeps its ownership; the
    // clone is consumed by `stack_value_to_value` and dropped with `value`.
    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer without creating a reference to the `static
        // mut` (references to `static mut` are rejected in edition 2024).
        unsafe { std::ptr::addr_of_mut!(BUFFER).cast::<StackValue>() }
    }};
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
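
    // Illustrative sketches added for clarity: they exercise only functions
    // defined in this module and the `Value` variants used above.
    #[test]
    fn test_int_round_trip() {
        unsafe {
            let sv = value_to_stack_value(Value::Int(42));
            assert_eq!(sv.slot0, DISC_INT);
            assert_eq!(sv.slot1, 42);
            assert!(matches!(stack_value_to_value(sv), Value::Int(42)));
        }
    }

    #[test]
    fn test_swap_reverses_top_two() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1));
            let stack = push(stack, Value::Int(2));
            // swap: ( 1 2 -- 2 1 ), so popping yields 1 then 2.
            let stack = patch_seq_swap(stack);
            let (stack, top) = pop(stack);
            assert!(matches!(top, Value::Int(1)));
            let (_stack, next) = pop(stack);
            assert!(matches!(next, Value::Int(2)));
        }
    }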
}