1use crate::son::{SonConfig, value_to_son};
17use crate::tagged_stack::{
18 DEFAULT_STACK_CAPACITY, StackValue, TAG_FALSE, TAG_TRUE, TaggedStack, is_tagged_int, tag_int,
19 untag_int,
20};
21use crate::value::Value;
22use std::cell::Cell;
23use std::sync::Arc;
24
25pub type Stack = *mut StackValue;
27
28#[inline]
29pub fn stack_value_size() -> usize {
30 std::mem::size_of::<StackValue>()
31}
32
// Discriminant codes exposing the `Value` variant kinds as stable `u64`s.
// NOTE(review): the consumers of these constants are not visible in this
// file — presumably externally generated code branches on them; the
// numbering must stay in sync with whatever reads it. Confirm before
// renumbering or inserting variants.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;
50
51#[inline]
53pub fn value_to_stack_value(value: Value) -> StackValue {
54 match value {
55 Value::Int(i) => tag_int(i),
56 Value::Bool(false) => TAG_FALSE,
57 Value::Bool(true) => TAG_TRUE,
58 other => {
59 Arc::into_raw(Arc::new(other)) as u64
61 }
62 }
63}
64
/// Converts a raw stack slot back into an owned `Value`, consuming the
/// slot's reference for heap-allocated values.
///
/// Tagged immediates (ints, booleans) are decoded in place. Anything else
/// is treated as a pointer produced by `value_to_stack_value` /
/// `clone_stack_value`: the `Arc` is reconstituted (taking over one strong
/// reference) and the inner `Value` is moved out if this was the last
/// reference, or cloned otherwise.
///
/// # Safety
/// For non-immediate values, `sv` must be a live pointer previously
/// produced by `Arc::into_raw` in this module, and the caller gives up
/// that reference — using `sv` again afterwards is a double free.
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    if is_tagged_int(sv) {
        Value::Int(untag_int(sv))
    } else if sv == TAG_FALSE {
        Value::Bool(false)
    } else if sv == TAG_TRUE {
        Value::Bool(true)
    } else {
        // SAFETY: caller guarantees `sv` came from `Arc::into_raw`.
        let arc = unsafe { Arc::from_raw(sv as *const Value) };
        // Move out of the Arc when uniquely owned; otherwise clone the value.
        Arc::try_unwrap(arc).unwrap_or_else(|arc| (*arc).clone())
    }
}
87
88#[unsafe(no_mangle)]
93pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
94 unsafe {
95 let sv = *src;
96 let cloned = clone_stack_value(sv);
97 *dst = cloned;
98 }
99}
100
101#[inline]
109pub unsafe fn clone_stack_value(sv: StackValue) -> StackValue {
110 if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
111 sv
113 } else {
114 unsafe {
116 let arc = Arc::from_raw(sv as *const Value);
117 let cloned = Arc::clone(&arc);
118 std::mem::forget(arc); Arc::into_raw(cloned) as u64
120 }
121 }
122}
123
124#[inline]
129pub unsafe fn drop_stack_value(sv: StackValue) {
130 if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
131 return;
133 }
134 unsafe {
136 let _ = Arc::from_raw(sv as *const Value);
137 }
138}
139
140#[inline]
149pub unsafe fn push(stack: Stack, value: Value) -> Stack {
150 unsafe {
151 let sv = value_to_stack_value(value);
152 *stack = sv;
153 stack.add(1)
154 }
155}
156
157#[inline]
162pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
163 unsafe {
164 *stack = sv;
165 stack.add(1)
166 }
167}
168
169#[inline]
174pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
175 unsafe {
176 let new_sp = stack.sub(1);
177 let sv = *new_sp;
178 (new_sp, stack_value_to_value(sv))
179 }
180}
181
182#[inline]
187pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
188 unsafe {
189 let new_sp = stack.sub(1);
190 let sv = *new_sp;
191 (new_sp, sv)
192 }
193}
194
195#[inline]
200pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
201 unsafe {
202 let (sp, b) = pop(stack);
203 let (sp, a) = pop(sp);
204 (sp, a, b)
205 }
206}
207
208#[inline]
213pub unsafe fn peek(stack: Stack) -> Value {
214 unsafe {
215 let sv = *stack.sub(1);
216 let cloned = clone_stack_value(sv);
217 stack_value_to_value(cloned)
218 }
219}
220
221#[inline]
226pub unsafe fn peek_sv(stack: Stack) -> StackValue {
227 unsafe { *stack.sub(1) }
228}
229
/// Returns a mutable reference to the heap `Value` stored in `slot`, or
/// `None` if the slot holds a tagged immediate or the value is shared.
///
/// The `Arc` is temporarily reconstituted without consuming the slot's
/// reference: `Arc::get_mut` succeeds only when the strong count is
/// exactly 1, and `mem::forget` hands the reference back to the slot.
///
/// # Safety
/// `slot` must point to a valid stack slot holding a value produced by
/// this module. The returned lifetime `'a` is unconstrained (conjured via
/// a raw-pointer round-trip), so the caller must ensure the reference does
/// not outlive the slot's value and that no aliasing access to the slot
/// occurs while it is held.
#[inline]
pub unsafe fn heap_value_mut<'a>(slot: *mut StackValue) -> Option<&'a mut Value> {
    unsafe {
        let sv = *slot;
        // Tagged immediates have no heap storage to mutate.
        if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
            return None;
        }
        let mut arc = Arc::from_raw(sv as *const Value);
        // get_mut returns None when the value is shared (strong count > 1);
        // the cast through *mut Value detaches the borrow from `arc` so the
        // Arc can be forgotten below without invalidating the reference.
        let val_ref = Arc::get_mut(&mut arc).map(|v| &mut *(v as *mut Value));
        std::mem::forget(arc); val_ref
    }
}
270
271#[inline]
276pub unsafe fn peek_heap_mut<'a>(stack: Stack) -> Option<&'a mut Value> {
277 unsafe { heap_value_mut(stack.sub(1)) }
278}
279
280#[inline]
286pub unsafe fn peek_heap_mut_second<'a>(stack: Stack) -> Option<&'a mut Value> {
287 unsafe { heap_value_mut(stack.sub(2)) }
288}
289
290#[unsafe(no_mangle)]
299pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
300 unsafe {
301 let sv = peek_sv(stack);
302 let cloned = clone_stack_value(sv);
303 push_sv(stack, cloned)
304 }
305}
306
307#[inline]
316pub unsafe fn drop_top(stack: Stack) -> Stack {
317 unsafe {
318 let (new_sp, sv) = pop_sv(stack);
319 drop_stack_value(sv);
320 new_sp
321 }
322}
323
/// C-ABI `drop`: ( a -- ). Thin exported wrapper around [`drop_top`].
///
/// # Safety
/// The stack must hold at least one value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
330
/// C-ABI push of a full `Value`.
///
/// `improper_ctypes_definitions` is allowed deliberately: `Value` is not
/// FFI-safe by the usual rules, but the only callers are generated code
/// built by this same toolchain, so the layout matches.
///
/// # Safety
/// `stack` must have room for one more value.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
338
339#[unsafe(no_mangle)]
344pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
345 unsafe {
346 let ptr_b = stack.sub(1);
347 let ptr_a = stack.sub(2);
348 let a = *ptr_a;
349 let b = *ptr_b;
350 *ptr_a = b;
351 *ptr_b = a;
352 stack
353 }
354}
355
356#[unsafe(no_mangle)]
361pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
362 unsafe {
363 let sv_a = *stack.sub(2);
364 let cloned = clone_stack_value(sv_a);
365 push_sv(stack, cloned)
366 }
367}
368
369#[unsafe(no_mangle)]
374pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
375 unsafe {
376 let ptr_c = stack.sub(1);
377 let ptr_b = stack.sub(2);
378 let ptr_a = stack.sub(3);
379 let a = *ptr_a;
380 let b = *ptr_b;
381 let c = *ptr_c;
382 *ptr_a = b;
383 *ptr_b = c;
384 *ptr_c = a;
385 stack
386 }
387}
388
389#[unsafe(no_mangle)]
394pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
395 unsafe {
396 let ptr_b = stack.sub(1);
397 let ptr_a = stack.sub(2);
398 let a = *ptr_a;
399 let b = *ptr_b;
400 drop_stack_value(a);
401 *ptr_a = b;
402 stack.sub(1)
403 }
404}
405
406#[unsafe(no_mangle)]
411pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
412 unsafe {
413 let ptr_b = stack.sub(1);
414 let ptr_a = stack.sub(2);
415 let a = *ptr_a;
416 let b = *ptr_b;
417 let b_clone = clone_stack_value(b);
418 *ptr_a = b;
419 *ptr_b = a;
420 push_sv(stack, b_clone)
421 }
422}
423
424#[unsafe(no_mangle)]
429pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
430 unsafe {
431 let sv_a = *stack.sub(2);
432 let sv_b = *stack.sub(1);
433 let a_clone = clone_stack_value(sv_a);
434 let b_clone = clone_stack_value(sv_b);
435 let sp = push_sv(stack, a_clone);
436 push_sv(sp, b_clone)
437 }
438}
439
440#[inline]
449unsafe fn pop_and_validate_index(stack: Stack, op_name: &str) -> Result<(Stack, usize), Stack> {
450 unsafe {
451 let (sp, n_val) = pop(stack);
452 let n_raw = match n_val {
453 Value::Int(i) => i,
454 _ => {
455 crate::error::set_runtime_error(format!(
456 "{}: expected Int index on top of stack",
457 op_name
458 ));
459 return Err(sp);
460 }
461 };
462 if n_raw < 0 {
463 crate::error::set_runtime_error(format!(
464 "{}: index cannot be negative (got {})",
465 op_name, n_raw
466 ));
467 return Err(sp);
468 }
469 Ok((sp, n_raw as usize))
470 }
471}
472
473#[inline]
476fn check_depth_for_index(sp: Stack, n: usize, op_name: &str) -> bool {
477 let base = get_stack_base();
478 let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
479 if n >= depth {
480 crate::error::set_runtime_error(format!(
481 "{}: index {} exceeds stack depth {} (need at least {} values)",
482 op_name,
483 n,
484 depth,
485 n + 1
486 ));
487 return false;
488 }
489 true
490}
491
492#[unsafe(no_mangle)]
497pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
498 unsafe {
499 let (sp, n) = match pop_and_validate_index(stack, "pick") {
500 Ok(x) => x,
501 Err(sp) => return sp,
502 };
503 if !check_depth_for_index(sp, n, "pick") {
504 return sp;
505 }
506
507 let sv = *sp.sub(n + 1);
508 let cloned = clone_stack_value(sv);
509 push_sv(sp, cloned)
510 }
511}
512
513#[unsafe(no_mangle)]
518pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
519 unsafe {
520 let (sp, n) = match pop_and_validate_index(stack, "roll") {
521 Ok(x) => x,
522 Err(sp) => return sp,
523 };
524
525 if n == 0 {
526 return sp;
527 }
528 if n == 1 {
529 return patch_seq_swap(sp);
530 }
531 if n == 2 {
532 return patch_seq_rot(sp);
533 }
534
535 if !check_depth_for_index(sp, n, "roll") {
536 return sp;
537 }
538
539 let src_ptr = sp.sub(n + 1);
540 let saved = *src_ptr;
541 std::ptr::copy(src_ptr.add(1), src_ptr, n);
542 *sp.sub(1) = saved;
543
544 sp
545 }
546}
547
548may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
553
554#[unsafe(no_mangle)]
557pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
558 STACK_BASE.with(|cell| {
559 cell.set(base as usize);
560 });
561}
562
563#[inline]
565pub fn get_stack_base() -> Stack {
566 STACK_BASE.with(|cell| cell.get() as *mut StackValue)
567}
568
569#[unsafe(no_mangle)]
572pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
573 unsafe {
574 let (new_sp, _base) = clone_stack_with_base(sp);
575 new_sp
576 }
577}
578
579pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
582 let base = get_stack_base();
583 if base.is_null() {
584 panic!("clone_stack: stack base not set");
585 }
586
587 let depth = unsafe { sp.offset_from(base) as usize };
588
589 if depth == 0 {
590 let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
591 let new_base = new_stack.base;
592 std::mem::forget(new_stack);
593 return (new_base, new_base);
594 }
595
596 let capacity = depth.max(DEFAULT_STACK_CAPACITY);
597 let new_stack = TaggedStack::new(capacity);
598 let new_base = new_stack.base;
599 std::mem::forget(new_stack);
600
601 unsafe {
602 for i in 0..depth {
603 let sv = *base.add(i);
604 let cloned = clone_stack_value(sv);
605 *new_base.add(i) = cloned;
606 }
607 }
608
609 unsafe { (new_base.add(depth), new_base) }
610}
611
612pub fn alloc_stack() -> Stack {
622 let stack = TaggedStack::with_default_capacity();
623 let base = stack.base;
624 std::mem::forget(stack);
625 base
626}
627
628pub fn alloc_test_stack() -> Stack {
634 let stack = alloc_stack();
635 unsafe { patch_seq_set_stack_base(stack) };
636 stack
637}
638
639#[unsafe(no_mangle)]
644pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
645 let base = get_stack_base();
646 if base.is_null() {
647 eprintln!("[stack.dump: base not set]");
648 return sp;
649 }
650
651 let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
652
653 if depth == 0 {
654 println!("»");
655 } else {
656 use std::io::Write;
657 print!("» ");
658 for i in 0..depth {
659 if i > 0 {
660 print!(" ");
661 }
662 unsafe {
663 let sv = *base.add(i);
664 print_stack_value(sv);
665 }
666 }
667 println!();
668 let _ = std::io::stdout().flush();
669
670 for i in 0..depth {
672 unsafe {
673 let sv = *base.add(i);
674 drop_stack_value(sv);
675 }
676 }
677 }
678
679 base
680}
681
682fn print_stack_value(sv: StackValue) {
683 let cloned = unsafe { clone_stack_value(sv) };
684 let value = unsafe { stack_value_to_value(cloned) };
685 let son = value_to_son(&value, &SonConfig::compact());
686 print!("{}", son);
687}
688
// Short Rust-facing aliases for the C-ABI stack-word entry points, so
// crate code can call `stack::dup`, `stack::swap`, … instead of the
// exported `patch_seq_*` symbol names.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
702
/// Expands to a freshly allocated stack whose base is registered as the
/// coroutine-local stack base — shorthand for tests in other modules.
#[macro_export]
macro_rules! test_stack {
    () => {{ $crate::stack::alloc_test_stack() }};
}
707
// Unit tests: error reporting for pick/roll index validation, push/pop
// round-trips for every Value variant (immediates and heap-boxed alike),
// and dup semantics for a heap-boxed value.
#[cfg(test)]
mod tests {
    use super::*;

    // --- pick/roll index validation ---

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    // --- push/pop round-trips, tagged immediates ---

    #[test]
    fn test_int_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(42));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Int(42));
        }
    }

    #[test]
    fn test_bool_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Bool(true));
            let stack = push(stack, Value::Bool(false));
            let (stack, val_f) = pop(stack);
            let (_, val_t) = pop(stack);
            assert_eq!(val_f, Value::Bool(false));
            assert_eq!(val_t, Value::Bool(true));
        }
    }

    // --- push/pop round-trips, heap-boxed variants ---

    #[test]
    fn test_float_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(std::f64::consts::PI));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Float(std::f64::consts::PI));
        }
    }

    #[test]
    fn test_string_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("hello");
            let stack = push(stack, Value::String(s));
            let (_, val) = pop(stack);
            match val {
                Value::String(s) => assert_eq!(s.as_str(), "hello"),
                other => panic!("Expected String, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_symbol_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("my-sym");
            let stack = push(stack, Value::Symbol(s));
            let (_, val) = pop(stack);
            match val {
                Value::Symbol(s) => assert_eq!(s.as_str(), "my-sym"),
                other => panic!("Expected Symbol, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_variant_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let tag = crate::seqstring::SeqString::from("Foo");
            let data = crate::value::VariantData::new(tag, vec![Value::Int(1), Value::Int(2)]);
            let stack = push(stack, Value::Variant(std::sync::Arc::new(data)));
            let (_, val) = pop(stack);
            match val {
                Value::Variant(v) => {
                    assert_eq!(v.tag.as_str(), "Foo");
                    assert_eq!(v.fields.len(), 2);
                }
                other => panic!("Expected Variant, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_map_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let mut map = std::collections::HashMap::new();
            map.insert(crate::value::MapKey::Int(1), Value::Int(100));
            let stack = push(stack, Value::Map(Box::new(map)));
            let (_, val) = pop(stack);
            match val {
                Value::Map(m) => {
                    assert_eq!(m.len(), 1);
                    assert_eq!(m.get(&crate::value::MapKey::Int(1)), Some(&Value::Int(100)));
                }
                other => panic!("Expected Map, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_quotation_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(
                stack,
                Value::Quotation {
                    wrapper: 0x1000,
                    impl_: 0x2000,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Quotation { wrapper, impl_ } => {
                    assert_eq!(wrapper, 0x1000);
                    assert_eq!(impl_, 0x2000);
                }
                other => panic!("Expected Quotation, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_closure_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let env: std::sync::Arc<[Value]> = std::sync::Arc::from(vec![Value::Int(42)]);
            let stack = push(
                stack,
                Value::Closure {
                    fn_ptr: 0x3000,
                    env,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Closure { fn_ptr, env } => {
                    assert_eq!(fn_ptr, 0x3000);
                    assert_eq!(env.len(), 1);
                }
                other => panic!("Expected Closure, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_channel_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (sender, receiver) = may::sync::mpmc::channel();
            let ch = std::sync::Arc::new(crate::value::ChannelData { sender, receiver });
            let stack = push(stack, Value::Channel(ch));
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::Channel(_)));
        }
    }

    #[test]
    fn test_weavectx_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (ys, yr) = may::sync::mpmc::channel();
            let (rs, rr) = may::sync::mpmc::channel();
            let yield_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: ys,
                receiver: yr,
            });
            let resume_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: rs,
                receiver: rr,
            });
            let stack = push(
                stack,
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                },
            );
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::WeaveCtx { .. }));
        }
    }

    // Float is not a tagged immediate, so dup must clone through the Arc;
    // popping both copies must yield equal values without a double free.
    #[test]
    fn test_dup_pop_pop_heap_type() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(2.5));
            let stack = patch_seq_dup(stack);
            let (stack, val1) = pop(stack);
            let (_, val2) = pop(stack);
            assert_eq!(val1, Value::Float(2.5));
            assert_eq!(val2, Value::Float(2.5));
        }
    }
}