1use crate::tagged_stack::{StackValue, TAG_FALSE, TAG_TRUE, is_tagged_int, tag_int, untag_int};
16use crate::value::Value;
17use std::sync::Arc;
18
/// Raw stack pointer. Points one past the current top slot: `push` writes at
/// `sp` and returns `sp + 1`; `pop` reads at `sp - 1`.
pub type Stack = *mut StackValue;
21
22#[inline]
24pub fn stack_value_size() -> usize {
25 std::mem::size_of::<StackValue>()
26}
27
// Discriminant codes identifying each `Value` variant across the codegen/FFI
// boundary. NOTE(review): none of these are referenced in this file —
// presumably consumed by generated code elsewhere; keep the numbering stable.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;
45
46#[inline]
48pub fn value_to_stack_value(value: Value) -> StackValue {
49 match value {
50 Value::Int(i) => tag_int(i),
51 Value::Bool(false) => TAG_FALSE,
52 Value::Bool(true) => TAG_TRUE,
53 other => {
54 Arc::into_raw(Arc::new(other)) as u64
56 }
57 }
58}
59
60#[inline]
66pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
67 if is_tagged_int(sv) {
68 Value::Int(untag_int(sv))
69 } else if sv == TAG_FALSE {
70 Value::Bool(false)
71 } else if sv == TAG_TRUE {
72 Value::Bool(true)
73 } else {
74 let arc = unsafe { Arc::from_raw(sv as *const Value) };
76 Arc::try_unwrap(arc).unwrap_or_else(|arc| (*arc).clone())
80 }
81}
82
83#[unsafe(no_mangle)]
88pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
89 unsafe {
90 let sv = *src;
91 let cloned = clone_stack_value(sv);
92 *dst = cloned;
93 }
94}
95
96#[inline]
104pub unsafe fn clone_stack_value(sv: StackValue) -> StackValue {
105 if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
106 sv
108 } else {
109 unsafe {
111 let arc = Arc::from_raw(sv as *const Value);
112 let cloned = Arc::clone(&arc);
113 std::mem::forget(arc); Arc::into_raw(cloned) as u64
115 }
116 }
117}
118
119#[inline]
124pub unsafe fn drop_stack_value(sv: StackValue) {
125 if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
126 return;
128 }
129 unsafe {
131 let _ = Arc::from_raw(sv as *const Value);
132 }
133}
134
135#[inline]
144pub unsafe fn push(stack: Stack, value: Value) -> Stack {
145 unsafe {
146 let sv = value_to_stack_value(value);
147 *stack = sv;
148 stack.add(1)
149 }
150}
151
152#[inline]
157pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
158 unsafe {
159 *stack = sv;
160 stack.add(1)
161 }
162}
163
164#[inline]
169pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
170 unsafe {
171 let new_sp = stack.sub(1);
172 let sv = *new_sp;
173 (new_sp, stack_value_to_value(sv))
174 }
175}
176
177#[inline]
182pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
183 unsafe {
184 let new_sp = stack.sub(1);
185 let sv = *new_sp;
186 (new_sp, sv)
187 }
188}
189
190#[inline]
195pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
196 unsafe {
197 let (sp, b) = pop(stack);
198 let (sp, a) = pop(sp);
199 (sp, a, b)
200 }
201}
202
203#[inline]
208pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
209 unsafe {
210 let (sp, c) = pop(stack);
211 let (sp, b) = pop(sp);
212 let (sp, a) = pop(sp);
213 (sp, a, b, c)
214 }
215}
216
217#[inline]
222pub unsafe fn peek(stack: Stack) -> Value {
223 unsafe {
224 let sv = *stack.sub(1);
225 let cloned = clone_stack_value(sv);
226 stack_value_to_value(cloned)
227 }
228}
229
230#[inline]
235pub unsafe fn peek_sv(stack: Stack) -> StackValue {
236 unsafe { *stack.sub(1) }
237}
238
/// Borrow the heap `Value` stored in `slot` mutably, in place.
///
/// Returns `None` if the slot holds an immediate (int/bool) or if the Arc is
/// shared (strong count > 1), since in-place mutation would then be visible
/// through other handles.
///
/// # Safety
/// `slot` must be valid; any heap word in it must come from `Arc::into_raw`.
/// The returned lifetime `'a` is unconstrained — the caller must not use the
/// reference after the slot is dropped or overwritten.
#[inline]
pub unsafe fn heap_value_mut<'a>(slot: *mut StackValue) -> Option<&'a mut Value> {
    unsafe {
        let sv = *slot;
        // Immediates have no heap storage to mutate.
        if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
            return None;
        }
        // Temporarily reconstitute the Arc to query uniqueness; the raw
        // pointer round-trip detaches the borrow from the local `arc`.
        let mut arc = Arc::from_raw(sv as *const Value);
        let val_ref = Arc::get_mut(&mut arc).map(|v| &mut *(v as *mut Value));
        // The slot still owns the strong count — don't drop it here.
        std::mem::forget(arc); val_ref
    }
}
279
/// In-place mutable access to the top-of-stack heap value, if any and if
/// uniquely owned; `None` otherwise. See [`heap_value_mut`] for the caveats.
#[inline]
pub unsafe fn peek_heap_mut<'a>(stack: Stack) -> Option<&'a mut Value> {
    unsafe { heap_value_mut(stack.sub(1)) }
}
288
/// Like [`peek_heap_mut`] but for the second slot from the top.
#[inline]
pub unsafe fn peek_heap_mut_second<'a>(stack: Stack) -> Option<&'a mut Value> {
    unsafe { heap_value_mut(stack.sub(2)) }
}
298
299#[unsafe(no_mangle)]
308pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
309 unsafe {
310 let sv = peek_sv(stack);
311 let cloned = clone_stack_value(sv);
312 push_sv(stack, cloned)
313 }
314}
315
316#[inline]
321pub unsafe fn drop_top(stack: Stack) -> Stack {
322 unsafe {
323 let (new_sp, sv) = pop_sv(stack);
324 drop_stack_value(sv);
325 new_sp
326 }
327}
328
/// C-ABI wrapper around [`drop_top`] for generated code.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
335
/// C-ABI wrapper around [`push`]. `Value` crosses the boundary by value,
/// hence the `improper_ctypes_definitions` allowance.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
343
344#[unsafe(no_mangle)]
349pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
350 unsafe {
351 let ptr_b = stack.sub(1);
352 let ptr_a = stack.sub(2);
353 let a = *ptr_a;
354 let b = *ptr_b;
355 *ptr_a = b;
356 *ptr_b = a;
357 stack
358 }
359}
360
361#[unsafe(no_mangle)]
366pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
367 unsafe {
368 let sv_a = *stack.sub(2);
369 let cloned = clone_stack_value(sv_a);
370 push_sv(stack, cloned)
371 }
372}
373
374#[unsafe(no_mangle)]
379pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
380 unsafe {
381 let ptr_c = stack.sub(1);
382 let ptr_b = stack.sub(2);
383 let ptr_a = stack.sub(3);
384 let a = *ptr_a;
385 let b = *ptr_b;
386 let c = *ptr_c;
387 *ptr_a = b;
388 *ptr_b = c;
389 *ptr_c = a;
390 stack
391 }
392}
393
394#[unsafe(no_mangle)]
399pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
400 unsafe {
401 let ptr_b = stack.sub(1);
402 let ptr_a = stack.sub(2);
403 let a = *ptr_a;
404 let b = *ptr_b;
405 drop_stack_value(a);
406 *ptr_a = b;
407 stack.sub(1)
408 }
409}
410
411#[unsafe(no_mangle)]
416pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
417 unsafe {
418 let ptr_b = stack.sub(1);
419 let ptr_a = stack.sub(2);
420 let a = *ptr_a;
421 let b = *ptr_b;
422 let b_clone = clone_stack_value(b);
423 *ptr_a = b;
424 *ptr_b = a;
425 push_sv(stack, b_clone)
426 }
427}
428
429#[unsafe(no_mangle)]
434pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
435 unsafe {
436 let sv_a = *stack.sub(2);
437 let sv_b = *stack.sub(1);
438 let a_clone = clone_stack_value(sv_a);
439 let b_clone = clone_stack_value(sv_b);
440 let sp = push_sv(stack, a_clone);
441 push_sv(sp, b_clone)
442 }
443}
444
445#[unsafe(no_mangle)]
450pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
451 unsafe {
452 let (sp, n_val) = pop(stack);
453 let n_raw = match n_val {
454 Value::Int(i) => i,
455 _ => {
456 crate::error::set_runtime_error("pick: expected Int index on top of stack");
457 return sp;
458 }
459 };
460
461 if n_raw < 0 {
462 crate::error::set_runtime_error(format!(
463 "pick: index cannot be negative (got {})",
464 n_raw
465 ));
466 return sp;
467 }
468 let n = n_raw as usize;
469
470 let base = get_stack_base();
471 let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
472 if n >= depth {
473 crate::error::set_runtime_error(format!(
474 "pick: index {} exceeds stack depth {} (need at least {} values)",
475 n,
476 depth,
477 n + 1
478 ));
479 return sp;
480 }
481
482 let sv = *sp.sub(n + 1);
483 let cloned = clone_stack_value(sv);
484 push_sv(sp, cloned)
485 }
486}
487
488#[unsafe(no_mangle)]
493pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
494 unsafe {
495 let (sp, n_val) = pop(stack);
496 let n_raw = match n_val {
497 Value::Int(i) => i,
498 _ => {
499 crate::error::set_runtime_error("roll: expected Int index on top of stack");
500 return sp;
501 }
502 };
503
504 if n_raw < 0 {
505 crate::error::set_runtime_error(format!(
506 "roll: index cannot be negative (got {})",
507 n_raw
508 ));
509 return sp;
510 }
511 let n = n_raw as usize;
512
513 if n == 0 {
514 return sp;
515 }
516 if n == 1 {
517 return patch_seq_swap(sp);
518 }
519 if n == 2 {
520 return patch_seq_rot(sp);
521 }
522
523 let base = get_stack_base();
524 let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
525 if n >= depth {
526 crate::error::set_runtime_error(format!(
527 "roll: index {} exceeds stack depth {} (need at least {} values)",
528 n,
529 depth,
530 n + 1
531 ));
532 return sp;
533 }
534
535 let src_ptr = sp.sub(n + 1);
536 let saved = *src_ptr;
537 std::ptr::copy(src_ptr.add(1), src_ptr, n);
538 *sp.sub(1) = saved;
539
540 sp
541 }
542}
543
544pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
549 unsafe {
550 for i in 0..count {
551 let sv = *src.sub(count - i);
552 let cloned = clone_stack_value(sv);
553 *dst.add(i) = cloned;
554 }
555 }
556}
557
use std::cell::Cell;

// Base pointer of the current value stack, stored as a usize; 0 means unset.
// Coroutine-local (via `may`) so each green task sees its own base.
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
565
566#[unsafe(no_mangle)]
569pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
570 STACK_BASE.with(|cell| {
571 cell.set(base as usize);
572 });
573}
574
575#[inline]
576pub fn get_stack_base() -> Stack {
577 STACK_BASE.with(|cell| cell.get() as *mut StackValue)
578}
579
580#[unsafe(no_mangle)]
583pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
584 unsafe {
585 let (new_sp, _base) = clone_stack_with_base(sp);
586 new_sp
587 }
588}
589
590pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
593 let base = get_stack_base();
594 if base.is_null() {
595 panic!("clone_stack: stack base not set");
596 }
597
598 let depth = unsafe { sp.offset_from(base) as usize };
599
600 if depth == 0 {
601 use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
602 let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
603 let new_base = new_stack.base;
604 std::mem::forget(new_stack);
605 return (new_base, new_base);
606 }
607
608 use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
609 let capacity = depth.max(DEFAULT_STACK_CAPACITY);
610 let new_stack = TaggedStack::new(capacity);
611 let new_base = new_stack.base;
612 std::mem::forget(new_stack);
613
614 unsafe {
615 for i in 0..depth {
616 let sv = *base.add(i);
617 let cloned = clone_stack_value(sv);
618 *new_base.add(i) = cloned;
619 }
620 }
621
622 unsafe { (new_base.add(depth), new_base) }
623}
624
625pub fn alloc_stack() -> Stack {
630 use crate::tagged_stack::TaggedStack;
631 let stack = TaggedStack::with_default_capacity();
632 let base = stack.base;
633 std::mem::forget(stack);
634 base
635}
636
637pub fn alloc_test_stack() -> Stack {
638 let stack = alloc_stack();
639 unsafe { patch_seq_set_stack_base(stack) };
640 stack
641}
642
643#[unsafe(no_mangle)]
648pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
649 let base = get_stack_base();
650 if base.is_null() {
651 eprintln!("[stack.dump: base not set]");
652 return sp;
653 }
654
655 let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
656
657 if depth == 0 {
658 println!("»");
659 } else {
660 use std::io::Write;
661 print!("» ");
662 for i in 0..depth {
663 if i > 0 {
664 print!(" ");
665 }
666 unsafe {
667 let sv = *base.add(i);
668 print_stack_value(sv);
669 }
670 }
671 println!();
672 let _ = std::io::stdout().flush();
673
674 for i in 0..depth {
676 unsafe {
677 let sv = *base.add(i);
678 drop_stack_value(sv);
679 }
680 }
681 }
682
683 base
684}
685
686fn print_stack_value(sv: StackValue) {
687 use crate::son::{SonConfig, value_to_son};
688
689 let cloned = unsafe { clone_stack_value(sv) };
690 let value = unsafe { stack_value_to_value(cloned) };
691 let son = value_to_son(&value, &SonConfig::compact());
692 print!("{}", son);
693}
694
// Short Forth-style aliases for internal callers that don't care about the
// `patch_seq_` FFI prefix.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
708
/// Allocates a fresh stack and registers it as the coroutine's base.
/// Intended for tests; expands to [`crate::stack::alloc_test_stack`].
#[macro_export]
macro_rules! test_stack {
    () => {{ $crate::stack::alloc_test_stack() }};
}
713
#[cfg(test)]
mod tests {
    //! Unit tests: error paths for `pick`/`roll`, plus push/pop round-trips
    //! for every `Value` variant (tagged immediates and Arc-boxed alike).
    use super::*;

    #[test]
    fn test_pick_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_pick_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_pick_op(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_roll_negative_index_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(-1));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("negative"));
        }
    }

    #[test]
    fn test_roll_out_of_bounds_sets_error() {
        unsafe {
            crate::error::clear_runtime_error();
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(100));
            let stack = push(stack, Value::Int(10));

            let _stack = patch_seq_roll(stack);

            assert!(crate::error::has_runtime_error());
            let error = crate::error::take_runtime_error().unwrap();
            assert!(error.contains("exceeds stack depth"));
        }
    }

    #[test]
    fn test_int_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Int(42));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Int(42));
        }
    }

    #[test]
    fn test_bool_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Bool(true));
            let stack = push(stack, Value::Bool(false));
            let (stack, val_f) = pop(stack);
            let (_, val_t) = pop(stack);
            assert_eq!(val_f, Value::Bool(false));
            assert_eq!(val_t, Value::Bool(true));
        }
    }

    #[test]
    fn test_float_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(std::f64::consts::PI));
            let (_, val) = pop(stack);
            assert_eq!(val, Value::Float(std::f64::consts::PI));
        }
    }

    #[test]
    fn test_string_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("hello");
            let stack = push(stack, Value::String(s));
            let (_, val) = pop(stack);
            match val {
                Value::String(s) => assert_eq!(s.as_str(), "hello"),
                other => panic!("Expected String, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_symbol_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let s = crate::seqstring::SeqString::from("my-sym");
            let stack = push(stack, Value::Symbol(s));
            let (_, val) = pop(stack);
            match val {
                Value::Symbol(s) => assert_eq!(s.as_str(), "my-sym"),
                other => panic!("Expected Symbol, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_variant_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let tag = crate::seqstring::SeqString::from("Foo");
            let data = crate::value::VariantData::new(tag, vec![Value::Int(1), Value::Int(2)]);
            let stack = push(stack, Value::Variant(std::sync::Arc::new(data)));
            let (_, val) = pop(stack);
            match val {
                Value::Variant(v) => {
                    assert_eq!(v.tag.as_str(), "Foo");
                    assert_eq!(v.fields.len(), 2);
                }
                other => panic!("Expected Variant, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_map_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let mut map = std::collections::HashMap::new();
            map.insert(crate::value::MapKey::Int(1), Value::Int(100));
            let stack = push(stack, Value::Map(Box::new(map)));
            let (_, val) = pop(stack);
            match val {
                Value::Map(m) => {
                    assert_eq!(m.len(), 1);
                    assert_eq!(m.get(&crate::value::MapKey::Int(1)), Some(&Value::Int(100)));
                }
                other => panic!("Expected Map, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_quotation_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(
                stack,
                Value::Quotation {
                    wrapper: 0x1000,
                    impl_: 0x2000,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Quotation { wrapper, impl_ } => {
                    assert_eq!(wrapper, 0x1000);
                    assert_eq!(impl_, 0x2000);
                }
                other => panic!("Expected Quotation, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_closure_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let env: std::sync::Arc<[Value]> = std::sync::Arc::from(vec![Value::Int(42)]);
            let stack = push(
                stack,
                Value::Closure {
                    fn_ptr: 0x3000,
                    env,
                },
            );
            let (_, val) = pop(stack);
            match val {
                Value::Closure { fn_ptr, env } => {
                    assert_eq!(fn_ptr, 0x3000);
                    assert_eq!(env.len(), 1);
                }
                other => panic!("Expected Closure, got {:?}", other),
            }
        }
    }

    #[test]
    fn test_channel_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (sender, receiver) = may::sync::mpmc::channel();
            let ch = std::sync::Arc::new(crate::value::ChannelData { sender, receiver });
            let stack = push(stack, Value::Channel(ch));
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::Channel(_)));
        }
    }

    #[test]
    fn test_weavectx_roundtrip() {
        unsafe {
            let stack = alloc_test_stack();
            let (ys, yr) = may::sync::mpmc::channel();
            let (rs, rr) = may::sync::mpmc::channel();
            let yield_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: ys,
                receiver: yr,
            });
            let resume_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
                sender: rs,
                receiver: rr,
            });
            let stack = push(
                stack,
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                },
            );
            let (_, val) = pop(stack);
            assert!(matches!(val, Value::WeaveCtx { .. }));
        }
    }

    #[test]
    fn test_dup_pop_pop_heap_type() {
        // Exercises the refcount bump in dup: both pops must decode cleanly.
        unsafe {
            let stack = alloc_test_stack();
            let stack = push(stack, Value::Float(2.5));
            let stack = patch_seq_dup(stack);
            let (stack, val1) = pop(stack);
            let (_, val2) = pop(stack);
            assert_eq!(val1, Value::Float(2.5));
            assert_eq!(val2, Value::Float(2.5));
        }
    }
}