use crate::tagged_stack::StackValue;
use crate::value::Value;
#[cfg(not(feature = "nanbox"))]
use std::sync::Arc;

/// A stack is a raw pointer to the next free slot; the stack grows upward.
pub type Stack = *mut StackValue;

/// Size in bytes of a single stack slot.
#[inline]
pub fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}

// Discriminants stored in `slot0` of a non-nanboxed `StackValue`.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

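// A compile-time sketch of the layout assumption the conversions below rely
// on: a non-nanboxed `StackValue` consists of five u64 slots (slot0..slot4),
// i.e. 40 bytes. This assertion is illustrative, not part of the original API.
#[cfg(not(feature = "nanbox"))]
const _: () = assert!(std::mem::size_of::<StackValue>() == 5 * std::mem::size_of::<u64>());
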
/// Converts an owned `Value` into a flat `StackValue`. Heap payloads are
/// decomposed into raw pointers; ownership moves into the returned slots.
#[cfg(not(feature = "nanbox"))]
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}

#[cfg(feature = "nanbox")]
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    StackValue(crate::nanbox::NanBoxedValue::from_value(&value))
}

/// Converts a `StackValue` back into an owned `Value`.
///
/// # Safety
/// `sv` must own its payload; ownership of any heap data moves into the
/// returned `Value`, so the slot must not be read again afterwards.
#[cfg(not(feature = "nanbox"))]
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}

#[cfg(feature = "nanbox")]
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe { sv.0.to_value() }
}

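// A minimal round-trip sketch: converting a `Value` into a `StackValue` and
// back must yield the original value, transferring ownership exactly once.
// This test works under both the nanbox and non-nanbox representations.
#[test]
fn roundtrip_int_sketch() {
    let sv = value_to_stack_value(Value::Int(42));
    let v = unsafe { stack_value_to_value(sv) };
    assert!(matches!(v, Value::Int(42)));
}
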
/// FFI entry point: clones the `StackValue` at `src` into `dst` without
/// consuming the source.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

/// Deep-clones a `StackValue` without consuming the original: strings and
/// symbols are copied, while `Arc`-backed payloads get a bumped refcount.
#[cfg(not(feature = "nanbox"))]
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                let slice = std::slice::from_raw_parts(ptr, len);
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                // Reconstruct the Arc, bump its refcount, then forget it so
                // the original slot keeps its ownership.
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                if capacity == 0 && is_global {
                    // Fast path: an interned (global) symbol can be shared by
                    // pointer; rebuilding from raw parts does not copy.
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}

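// Ownership sketch: cloning a heap-backed value and then dropping both copies
// must leave all reference counts and allocations balanced. `global_string`
// here mirrors the string clone path above; its exact interning behavior is
// an assumption of this sketch, not something the test verifies.
#[test]
fn clone_then_drop_string_sketch() {
    let sv = value_to_stack_value(Value::String(crate::seqstring::global_string(
        "example".to_string(),
    )));
    unsafe {
        let cloned = clone_stack_value(&sv);
        drop_stack_value(cloned);
        drop_stack_value(sv);
    }
}
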
#[cfg(feature = "nanbox")]
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    StackValue(sv.0.clone_nanboxed())
}

/// Releases the heap payload (if any) owned by `sv`.
#[cfg(not(feature = "nanbox"))]
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
                // Inline payloads: nothing to release.
            }
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}

#[cfg(feature = "nanbox")]
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe { sv.0.drop_nanboxed() }
}

/// Pushes `value` and returns the new stack pointer. The caller must ensure
/// the stack has capacity for one more slot.
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

/// Pushes an already-converted `StackValue`.
#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

/// Pops the top value, returning the new stack pointer and the owned `Value`.
#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

/// Pops the top slot without converting it to a `Value`.
#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

/// Pops two values; `a` was pushed before `b`. `_op_name` is currently unused.
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}

/// Pops three values; `a` was pushed first. `_op_name` is currently unused.
#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

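// LIFO sketch: `push` advances the stack pointer one slot at a time and `pop`
// returns values in reverse push order.
#[test]
fn push_pop_lifo_sketch() {
    unsafe {
        let sp = alloc_test_stack();
        let sp = push(sp, Value::Int(1));
        let sp = push(sp, Value::Int(2));
        let (sp, b) = pop(sp);
        let (_sp, a) = pop(sp);
        assert!(matches!(b, Value::Int(2)));
        assert!(matches!(a, Value::Int(1)));
    }
}
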
/// Returns a clone of the top value without removing it.
#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let sv = *stack.sub(1);
        stack_value_to_value(clone_stack_value(&sv))
    }
}

/// Returns a bitwise copy of the top slot without cloning its payload.
#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // A bare stack pointer cannot be compared against its base here, so this
    // conservatively reports non-empty.
    false
}

/// `dup` ( a -- a a ): clones the top value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

/// Pops and releases the top value.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

/// `drop` ( a -- ): FFI entry point for `drop_top`.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

// `Value` is not FFI-safe to pass by value, hence the allow; callers must
// agree on the Rust layout of `Value`.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

/// `swap` ( a b -- b a )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

/// `over` ( a b -- a b a )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

/// `rot` ( a b c -- b c a )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}

/// `nip` ( a b -- b ): drops the second value from the top.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

/// `tuck` ( a b -- b a b )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

/// `2dup` ( a b -- a b a b )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

/// `3drop` ( a b c -- )
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
    unsafe {
        let (sp, sv_c) = pop_sv(stack);
        let (sp, sv_b) = pop_sv(sp);
        let (sp, sv_a) = pop_sv(sp);
        drop_stack_value(sv_c);
        drop_stack_value(sv_b);
        drop_stack_value(sv_a);
        sp
    }
}

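// Shuffle sketch: exercising the Forth-style stack effect documented above,
// e.g. `swap` ( a b -- b a ).
#[test]
fn swap_sketch() {
    unsafe {
        let sp = alloc_test_stack();
        let sp = push(sp, Value::Int(1));
        let sp = push(sp, Value::Int(2));
        let sp = patch_seq_swap(sp);
        let (sp, top) = pop(sp);
        let (_sp, next) = pop(sp);
        assert!(matches!(top, Value::Int(1)));
        assert!(matches!(next, Value::Int(2)));
    }
}
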
/// `pick` ( x_n ... x_0 n -- x_n ... x_0 x_n ): clones the value `n` slots
/// below the top. On a bad index, sets a runtime error and returns the stack
/// with only the index popped.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("pick: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}

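// `pick` sketch: with 10 20 30 on the stack, `2 pick` copies the value two
// slots below the top ( 10 20 30 2 -- 10 20 30 10 ).
#[test]
fn pick_sketch() {
    unsafe {
        let sp = alloc_test_stack();
        let sp = push(sp, Value::Int(10));
        let sp = push(sp, Value::Int(20));
        let sp = push(sp, Value::Int(30));
        let sp = push(sp, Value::Int(2));
        let sp = patch_seq_pick_op(sp);
        let (_sp, top) = pop(sp);
        assert!(matches!(top, Value::Int(10)));
    }
}
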
/// `roll` ( x_n ... x_0 n -- x_{n-1} ... x_0 x_n ): moves the value `n` slots
/// below the top to the top, shifting the values above it down by one.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => {
                crate::error::set_runtime_error("roll: expected Int index on top of stack");
                return sp;
            }
        };

        if n_raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                n_raw
            ));
            return sp;
        }
        let n = n_raw as usize;

        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }

        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        // Shift the n values above the source down one slot (overlapping copy).
        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        *sp.sub(1) = saved;

        sp
    }
}

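// `roll` sketch: `2 roll` rotates the third value to the top
// ( 1 2 3 2 -- 2 3 1 ).
#[test]
fn roll_sketch() {
    unsafe {
        let sp = alloc_test_stack();
        let sp = push(sp, Value::Int(1));
        let sp = push(sp, Value::Int(2));
        let sp = push(sp, Value::Int(3));
        let sp = push(sp, Value::Int(2));
        let sp = patch_seq_roll(sp);
        let (sp, top) = pop(sp);
        let (_sp, next) = pop(sp);
        assert!(matches!(top, Value::Int(1)));
        assert!(matches!(next, Value::Int(3)));
    }
}
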
/// Clones the top `count` values from `src` (reading downward from the stack
/// pointer) into ascending slots starting at `dst`, preserving their order.
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for i in 0..count {
            let sv = *src.sub(count - i);
            let cloned = clone_stack_value(&sv);
            *dst.add(i) = cloned;
        }
    }
}

use std::cell::Cell;

// Base pointer of the current coroutine's stack, used by operations that need
// the stack depth (`pick`, `roll`, `stack.dump`, `clone_stack`).
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

/// Registers `base` as the stack base for the current coroutine.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

/// Deep-clones the current stack, returning the new stack pointer.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

/// Deep-clones the current stack, returning `(new_sp, new_base)`.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        // Leak the backing allocation; the returned raw pointers own it now.
        std::mem::forget(new_stack);
        return (new_base, new_base);
    }

    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    std::mem::forget(new_stack);
    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    unsafe { (new_base.add(depth), new_base) }
}

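// Deep-copy sketch: the clone owns independent values, so popping from the
// clone leaves the original intact.
#[test]
fn clone_stack_deep_copy_sketch() {
    unsafe {
        let sp = alloc_test_stack();
        let sp = push(sp, Value::Int(7));
        let new_sp = clone_stack(sp);
        let (_new_sp, cloned) = pop(new_sp);
        let (_sp, original) = pop(sp);
        assert!(matches!(cloned, Value::Int(7)));
        assert!(matches!(original, Value::Int(7)));
    }
}
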
// Short aliases for the FFI entry points above.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_3drop as three_drop;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

/// Allocates a fresh stack and returns its base pointer. The backing
/// allocation is intentionally leaked; the caller owns the raw stack.
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    std::mem::forget(stack);
    base
}

/// Test helper: allocates a stack and registers it as the stack base.
#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

/// `stack.dump`: prints the whole stack bottom-to-top, then consumes it,
/// returning the (now empty) base pointer.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("stack:");
    } else {
        use std::io::Write;
        print!("stack: ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        let _ = std::io::stdout().flush();

        // The dump consumes the stack: release every value that was printed.
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}

fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    // Clone first: converting to a `Value` takes ownership, so the `Value`
    // drop releases the clone rather than the caller's original.
    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer without creating a reference to the `static mut`.
        unsafe { std::ptr::addr_of_mut!(BUFFER).cast::<StackValue>() }
    }};
}

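// Usage sketch: the macro yields a raw pointer into a zeroed static buffer,
// suitable only for tests that stay within 256 slots and do not need a
// registered stack base:
//
//     let sp = test_stack!();
//     let sp = unsafe { push(sp, Value::Int(1)) };
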
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));
            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }
}