use crate::tagged_stack::StackValue;
use crate::value::Value;
use std::sync::Arc;

pub type Stack = *mut StackValue;

#[inline]
pub fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}

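// `slot0` holds one of the DISC_* discriminants below; `slot1`..`slot4`
// carry the payload: the scalar itself for Int/Float/Bool, or the raw
// pointer parts of the heap allocation for String, Variant, Map, and the
// other heap-backed kinds.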
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}

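/// Rebuilds a `Value` from a `StackValue`, reclaiming ownership of any heap
/// resources the slots point at.
///
/// # Safety
/// `sv` must have been produced by `value_to_stack_value` (or a clone of
/// one) and must not be converted or dropped again afterwards: the raw
/// pointers in its slots are consumed here, so a second conversion would
/// double-free.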
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}

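/// FFI entry point: deep-clones the `StackValue` at `src` into `dst`,
/// leaving the value at `src` valid.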
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            // Inline scalars (and quotations, which are just code pointers)
            // can be copied bitwise.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                let slice = std::slice::from_raw_parts(ptr, len);
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                // Rebuild the Arc, bump its refcount, then forget the rebuilt
                // handle so the original's count is left unchanged.
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                // The environment is shared: clone the Arc, not the values in it.
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                if capacity == 0 && is_global {
                    // Fast path: capacity 0 with the global flag set marks an
                    // interned symbol, so the raw parts can be copied as-is.
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}

#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            // Inline scalars own no heap memory; nothing to release.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {}
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}

#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}

#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        // Clone first: converting to `Value` consumes the heap resources,
        // and the original must stay valid on the stack.
        let sv = *stack.sub(1);
        stack_value_to_value(clone_stack_value(&sv))
    }
}

#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // The stack pointer alone carries no depth information; depth checks go
    // through `get_stack_base` instead, so this stub always reports non-empty.
    false
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

/// swap: ( a b -- b a )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

/// over: ( a b -- a b a )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

/// rot: ( a b c -- b c a )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}

/// nip: ( a b -- b )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

/// tuck: ( a b -- b a b )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

/// 2dup: ( a b -- a b a b )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

/// 3drop: ( a b c -- )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
    unsafe {
        let (sp, sv_c) = pop_sv(stack);
        let (sp, sv_b) = pop_sv(sp);
        let (sp, sv_a) = pop_sv(sp);
        drop_stack_value(sv_c);
        drop_stack_value(sv_b);
        drop_stack_value(sv_a);
        sp
    }
}

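/// pick: copies the value `n` slots below the top onto the top, where `n` is
/// popped from the stack as an `Int`. `0 pick` behaves like `dup` and
/// `1 pick` like `over`. Sets a runtime error on a negative or out-of-range
/// index.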
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}

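/// roll: moves the value `n` slots below the top to the top, shifting the
/// values above it down by one; `n` is popped from the stack as an `Int`.
/// `1 roll` is `swap` and `2 roll` is `rot`, so `[a b c] 2 roll` yields
/// `[b c a]`. Sets a runtime error on a negative or out-of-range index.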
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        // Small cases have dedicated, cheaper implementations.
        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        // Save the value being rolled, shift everything above it down one
        // slot, then write the saved value at the top.
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;
        std::ptr::copy(src_ptr.add(1), src_ptr, n);
        *sp.sub(1) = saved;

        sp
    }
}

904
905pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
912 unsafe {
913 for i in 0..count {
914 let sv = *src.sub(count - i);
915 let cloned = clone_stack_value(&sv);
916 *dst.add(i) = cloned;
917 }
918 }
919}
920
use std::cell::Cell;

may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        // Leak the backing allocation: ownership transfers to the caller via
        // the returned raw pointers.
        std::mem::forget(new_stack);
        return (new_base, new_base);
    }

    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack);

    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    unsafe { (new_base.add(depth), new_base) }
}

pub use patch_seq_2dup as two_dup;
pub use patch_seq_3drop as three_drop;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    // Leak the allocation; the caller owns the stack through the raw base
    // pointer.
    std::mem::forget(stack);
    base
}

#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("stack:");
    } else {
        use std::io::Write;
        print!("stack: ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        let _ = std::io::stdout().flush();

        // Dumping consumes the stack: release every value, then return the
        // base so the caller continues with an empty stack.
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}

fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    // Clone before converting: `stack_value_to_value` takes ownership of the
    // heap resources, and the caller still owns the original.
    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer directly; creating a `&mut` to a `static mut`
        // is rejected in edition 2024.
        unsafe { (&raw mut BUFFER) as *mut StackValue }
    }};
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

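    // Sketch (added): a round-trip check that push/pop preserve LIFO order
    // for inline scalars; `"test"` is just a placeholder op name.
    #[test]
    fn test_push_pop_lifo_order() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1));
            let stack = push(stack, Value::Int(2));
            let stack = push(stack, Value::Int(3));
            let (_stack, a, b, c) = pop_three(stack, "test");
            assert!(matches!(a, Value::Int(1)));
            assert!(matches!(b, Value::Int(2)));
            assert!(matches!(c, Value::Int(3)));
        }
    }
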
    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

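    // Sketch (added): swap and rot reorder values as documented above:
    // [1 2] swap -> [2 1] and [1 2 3] rot -> [2 3 1], listed bottom to top.
    #[test]
    fn test_swap_and_rot_reorder_ints() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(1));
            let stack = push(stack, Value::Int(2));
            let stack = patch_seq_swap(stack);
            let (stack, top) = pop(stack);
            let (stack, next) = pop(stack);
            assert!(matches!(top, Value::Int(1)));
            assert!(matches!(next, Value::Int(2)));

            let stack = push(stack, Value::Int(1));
            let stack = push(stack, Value::Int(2));
            let stack = push(stack, Value::Int(3));
            let stack = patch_seq_rot(stack);
            let (stack, top) = pop(stack);
            let (stack, mid) = pop(stack);
            let (_stack, bot) = pop(stack);
            assert!(matches!(top, Value::Int(1)));
            assert!(matches!(mid, Value::Int(3)));
            assert!(matches!(bot, Value::Int(2)));
        }
    }
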
    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
}