1use crate::tagged_stack::StackValue;
11use crate::value::Value;
12use std::sync::Arc;
13
/// A data stack is a raw pointer to the next free `StackValue` slot
/// (one past the current top); it grows upward via `ptr::add`.
pub type Stack = *mut StackValue;
19
20#[inline]
22pub fn stack_value_size() -> usize {
23 std::mem::size_of::<StackValue>()
24}
25
// Discriminant tags stored in `slot0` of a `StackValue`, identifying which
// `Value` variant the remaining slots encode. Shared with JIT/FFI code via
// the `patch_seq_*` entry points, so the numeric values must stay stable.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;
38
#[inline]
/// Encodes an owned `Value` into the flat five-slot `StackValue` form.
///
/// `slot0` always holds the `DISC_*` discriminant. Ownership of any heap
/// payload (string buffers, Arcs, boxed maps/closure envs) is transferred
/// into raw pointers stored in the remaining slots; the caller must later
/// reclaim it via `stack_value_to_value` or `drop_stack_value`, or the
/// payload leaks.
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            // Stored as the raw bit pattern; decoded with f64::from_bits.
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        // Strings decompose into (ptr, len, capacity, global flag).
        Value::String(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        // Symbols use the same raw-parts layout as strings.
        Value::Symbol(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            // Arc::into_raw leaks one strong reference into slot1.
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        // Quotations carry two code addresses; no heap ownership involved.
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            // The env is boxed so a single thin pointer fits in slot2.
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            // Two leaked Arc references, one per channel direction.
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}
154
#[inline]
/// Decodes a `StackValue` back into an owned `Value`, taking ownership of
/// any heap payload the slots reference.
///
/// # Safety
/// `sv` must have been produced by `value_to_stack_value` (or an equivalent
/// encoder such as `clone_stack_value`) and must not have been decoded or
/// dropped already — each heap pointer is reclaimed exactly once here.
///
/// # Panics
/// Panics on an unknown discriminant in `slot0`.
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Reclaims the strong reference leaked by Arc::into_raw.
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // slot2 holds a Box around the env Arc; unbox and move it out.
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}
223
#[unsafe(no_mangle)]
/// FFI entry point: deep-clones the `StackValue` at `src` into `dst`.
///
/// # Safety
/// `src` must point to a valid, live `StackValue`; `dst` must be valid for
/// writes. The slot previously at `dst` is overwritten without being
/// dropped, so it must not own live heap resources.
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}
238
#[inline]
/// Deep-clones a `StackValue` without consuming the original.
///
/// Scalars are copied bit-for-bit; refcounted payloads (variants, channels,
/// weave contexts, closure envs) get a refcount bump; strings, symbols and
/// maps are deep-copied (except the interned-symbol fast path). The
/// original stays valid and owned by the caller.
///
/// # Safety
/// `sv` must be a valid, live encoding produced by `value_to_stack_value`;
/// every embedded pointer must still be owned by `sv`.
///
/// # Panics
/// Panics on an unknown discriminant in `slot0`.
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            // Plain-data variants: a bitwise copy is a complete clone.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                let slice = std::slice::from_raw_parts(ptr, len);
                // Debug builds validate UTF-8; release builds trust the encoder.
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                // Temporarily rematerialize the Arc to bump its refcount,
                // then forget it so the original's reference is untouched.
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                // Borrow via a temporary Box, deep-copy the map, then forget
                // the temporary so the original stays owned by `sv`.
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                // The env is shared by refcount, not deep-copied; the code
                // pointer in slot1 is copied verbatim.
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                // Same bump-and-forget refcount dance as DISC_VARIANT.
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                if capacity == 0 && is_global {
                    // Fast path: capacity 0 + global flag appears to mark an
                    // interned symbol, so the pointer is shared rather than
                    // re-interning the text — TODO confirm interned bytes are
                    // never freed.
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    // Slow path: copy the text into a fresh global string.
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}
433
#[inline]
/// Releases whatever heap resources the `StackValue` owns by rebuilding the
/// owning wrapper (SeqString/Arc/Box) and letting it drop.
///
/// # Safety
/// `sv` must be a valid, live encoding; it must not be read, cloned, or
/// dropped again afterwards — each payload is reclaimed exactly once.
///
/// # Panics
/// Panics on an unknown discriminant in `slot0`.
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            // Plain-data variants own nothing on the heap.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
            }
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Frees the boxed env Arc; the code pointer in slot1 is not owned.
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}
490
491#[inline]
502pub unsafe fn push(stack: Stack, value: Value) -> Stack {
503 unsafe {
504 let sv = value_to_stack_value(value);
505 *stack = sv;
506 stack.add(1)
507 }
508}
509
510#[inline]
515pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
516 unsafe {
517 *stack = sv;
518 stack.add(1)
519 }
520}
521
522#[inline]
529pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
530 unsafe {
531 let new_sp = stack.sub(1);
532 let sv = *new_sp;
533 (new_sp, stack_value_to_value(sv))
534 }
535}
536
537#[inline]
542pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
543 unsafe {
544 let new_sp = stack.sub(1);
545 let sv = *new_sp;
546 (new_sp, sv)
547 }
548}
549
550#[inline]
558pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
559 unsafe {
560 let (sp, b) = pop(stack);
561 let (sp, a) = pop(sp);
562 (sp, a, b)
563 }
564}
565
566#[inline]
571pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
572 unsafe {
573 let (sp, c) = pop(stack);
574 let (sp, b) = pop(sp);
575 let (sp, a) = pop(sp);
576 (sp, a, b, c)
577 }
578}
579
580#[inline]
585pub unsafe fn peek(stack: Stack) -> Value {
586 unsafe {
587 let sv = *stack.sub(1);
588 stack_value_to_value(clone_stack_value(&sv))
590 }
591}
592
593#[inline]
598pub unsafe fn peek_sv(stack: Stack) -> StackValue {
599 unsafe { *stack.sub(1) }
600}
601
602#[unsafe(no_mangle)]
611pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
612 unsafe {
613 let sv = peek_sv(stack);
614 let cloned = clone_stack_value(&sv);
615 push_sv(stack, cloned)
616 }
617}
618
619#[inline]
624pub unsafe fn drop_top(stack: Stack) -> Stack {
625 unsafe {
626 let (new_sp, sv) = pop_sv(stack);
627 drop_stack_value(sv);
628 new_sp
629 }
630}
631
#[unsafe(no_mangle)]
/// FFI `drop`: pops the top value and releases its resources.
///
/// # Safety
/// `stack` must have at least one live value below it.
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
640
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
/// FFI push: moves `value` onto the stack and returns the bumped pointer.
///
/// `Value` is not FFI-safe (hence the `improper_ctypes_definitions` allow);
/// callers must be compiled against the exact same Rust layout.
///
/// # Safety
/// `stack` must have capacity for one more slot.
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
650
651#[unsafe(no_mangle)]
656pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
657 unsafe {
658 let ptr_b = stack.sub(1);
659 let ptr_a = stack.sub(2);
660 let a = *ptr_a;
661 let b = *ptr_b;
662 *ptr_a = b;
663 *ptr_b = a;
664 stack
665 }
666}
667
668#[unsafe(no_mangle)]
673pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
674 unsafe {
675 let sv_a = *stack.sub(2);
676 let cloned = clone_stack_value(&sv_a);
677 push_sv(stack, cloned)
678 }
679}
680
681#[unsafe(no_mangle)]
686pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
687 unsafe {
688 let ptr_c = stack.sub(1);
689 let ptr_b = stack.sub(2);
690 let ptr_a = stack.sub(3);
691 let a = *ptr_a;
692 let b = *ptr_b;
693 let c = *ptr_c;
694 *ptr_a = b;
695 *ptr_b = c;
696 *ptr_c = a;
697 stack
698 }
699}
700
701#[unsafe(no_mangle)]
706pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
707 unsafe {
708 let ptr_b = stack.sub(1);
709 let ptr_a = stack.sub(2);
710 let a = *ptr_a;
711 let b = *ptr_b;
712 drop_stack_value(a);
713 *ptr_a = b;
714 stack.sub(1)
715 }
716}
717
718#[unsafe(no_mangle)]
723pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
724 unsafe {
725 let ptr_b = stack.sub(1);
726 let ptr_a = stack.sub(2);
727 let a = *ptr_a;
728 let b = *ptr_b;
729 let b_clone = clone_stack_value(&b);
730 *ptr_a = b;
731 *ptr_b = a;
732 push_sv(stack, b_clone)
733 }
734}
735
736#[unsafe(no_mangle)]
741pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
742 unsafe {
743 let sv_a = *stack.sub(2);
744 let sv_b = *stack.sub(1);
745 let a_clone = clone_stack_value(&sv_a);
746 let b_clone = clone_stack_value(&sv_b);
747 let sp = push_sv(stack, a_clone);
748 push_sv(sp, b_clone)
749 }
750}
751
#[unsafe(no_mangle)]
/// FFI `pick`: pops an index `n` and pushes a clone of the value `n` slots
/// below the (post-pop) top; `0 pick` behaves like `dup`.
///
/// On a non-Int index, a negative index, or an index past the stack depth,
/// sets a runtime error and returns the stack with only the index popped.
///
/// # Safety
/// `stack` must be a valid stack pointer with at least the index on top,
/// and this coroutine's stack base must have been registered via
/// `patch_seq_set_stack_base` so the depth check is meaningful.
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        let base = get_stack_base();
        // Depth in elements between the base and the post-pop stack pointer.
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}
806
#[unsafe(no_mangle)]
/// FFI `roll`: pops an index `n` and moves the value `n` slots deep to the
/// top, shifting the values above it down one slot. `0 roll` is a no-op,
/// `1 roll` is `swap`, `2 roll` is `rot`.
///
/// On a non-Int, negative, or out-of-depth index, sets a runtime error and
/// returns the stack with only the index popped.
///
/// # Safety
/// Same contract as `patch_seq_pick_op`: a valid stack pointer and a
/// registered stack base for this coroutine.
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        // Small cases delegate to cheaper dedicated primitives.
        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift the n values above the rolled one down by one slot;
        // ptr::copy is memmove, so the overlapping ranges are fine.
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        *sp.sub(1) = saved;

        sp
    }
}
878
879pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
886 unsafe {
887 for i in 0..count {
888 let sv = *src.sub(count - i);
889 let cloned = clone_stack_value(&sv);
890 *dst.add(i) = cloned;
891 }
892 }
893}
894
use std::cell::Cell;

// Per-coroutine storage for the current stack's base pointer, kept as a
// usize; 0 means "not set" (get_stack_base then returns null).
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
907
908#[unsafe(no_mangle)]
913pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
914 STACK_BASE.with(|cell| {
915 cell.set(base as usize);
916 });
917}
918
919#[inline]
921pub fn get_stack_base() -> Stack {
922 STACK_BASE.with(|cell| cell.get() as *mut StackValue)
923}
924
#[unsafe(no_mangle)]
/// FFI wrapper around `clone_stack_with_base` that discards the new base
/// and returns only the new stack pointer.
///
/// # Safety
/// Same contract as `clone_stack_with_base`.
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}
940
/// Deep-clones the entire stack from its registered base up to `sp`.
///
/// Returns `(new_sp, new_base)` for the freshly allocated copy. The new
/// backing `TaggedStack` is deliberately leaked (`mem::forget`) so the
/// buffer stays alive under the raw-pointer discipline, matching
/// `alloc_stack`.
///
/// # Safety
/// `sp` must lie within the stack registered via
/// `patch_seq_set_stack_base`, with every slot in `base..sp` holding a
/// valid, live `StackValue`.
///
/// # Panics
/// Panics if no stack base has been set for this coroutine.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        // Empty stack: hand back a fresh default-capacity buffer.
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        std::mem::forget(new_stack);
        return (new_base, new_base);
    }

    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack);
    unsafe {
        // Deep-clone slot by slot so the copy owns independent payloads
        // (or bumped refcounts) rather than aliasing the originals.
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    unsafe { (new_base.add(depth), new_base) }
}
985
// Short aliases so Rust callers can use the stack-op names without the
// `patch_seq_` FFI prefix.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
999
/// Allocates a fresh tagged stack and returns its base pointer.
///
/// The backing `TaggedStack` is deliberately leaked (`mem::forget`) so the
/// buffer stays alive for the raw-pointer stack discipline; its capacity
/// bookkeeping is discarded, so the buffer is never freed.
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    std::mem::forget(stack);
    base
}
1018
/// Allocates a stack and registers it as this coroutine's base so the
/// depth-checked ops (`pick`, `roll`, `stack.dump`) work in tests.
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}
1026
#[unsafe(no_mangle)]
/// FFI `stack.dump`: prints the whole stack ("» v1 v2 …") to stdout, then
/// drops every value and returns the base pointer — i.e. dumping CLEARS
/// the stack.
///
/// # Safety
/// `sp` must point into the stack registered via
/// `patch_seq_set_stack_base`, with every slot below it live.
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("»");
    } else {
        use std::io::Write;
        print!("» ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        let _ = std::io::stdout().flush();

        // The dumped values are consumed: release their heap payloads.
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}
1077
/// Renders one `StackValue` in compact SON form to stdout (no newline).
///
/// Works on a deep clone so the caller's value stays live: the clone is
/// consumed by `stack_value_to_value` and freed when the `Value` drops.
fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}
1097
#[macro_export]
/// Yields a pointer to a zeroed, statically allocated 256-slot stack.
///
/// NOTE(review): each expansion declares its own `static mut` buffer, so a
/// given expansion site always returns the same buffer — not safe for
/// concurrent use; intended for single-threaded tests only.
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        unsafe { BUFFER.as_mut_ptr() }
    }};
}
1107
#[cfg(test)]
mod tests {
    use super::*;

    // These tests drive the raw-pointer stack API directly and verify that
    // invalid pick/roll indices surface as runtime errors rather than
    // panics or undefined behavior.

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
}