1use crate::tagged_stack::StackValue;
11use crate::value::Value;
12use std::sync::Arc;
13
/// Raw stack pointer handed to JIT/FFI code. Points one slot *past* the
/// current top of stack: `push` writes at the pointer then advances it,
/// `pop` retreats first and then reads.
pub type Stack = *mut StackValue;

// Discriminant tags stored in `slot0` of a `StackValue`; they select which
// `Value` variant the remaining slots (`slot1`..`slot4`) encode.
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
30
/// Converts an owned [`Value`] into its flat, tagged 5-slot representation.
///
/// Ownership of any heap payload is *transferred* into the returned
/// `StackValue` as raw pointer bits (string buffer via `into_raw_parts`,
/// variant/channel via `Arc::into_raw`, map/closure-env via `Box::into_raw`).
/// The caller must eventually release it again through
/// `stack_value_to_value` or `drop_stack_value`, or the payload leaks.
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        // Scalars: the payload is stored inline in slot1.
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        // Floats are stored as their raw IEEE-754 bit pattern.
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        // Strings decompose into (ptr, len, capacity, global-flag); the
        // buffer is now owned by the stack slot.
        Value::String(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        // Arc payload: into_raw keeps the refcount; the slot holds one count.
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        // Quotations are two plain function-pointer-sized integers; no heap.
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        // The env (an Arc<[Value]>) is boxed so a single thin pointer fits
        // in slot2; slot1 carries the function pointer.
        Value::Closure { fn_ptr, env } => {
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}
120
/// Reconstructs an owned [`Value`] from its tagged 5-slot representation,
/// taking back ownership of any heap payload stored in the slots.
///
/// This is the inverse of [`value_to_stack_value`]; each `StackValue`
/// produced there must be consumed exactly once (here or by
/// `drop_stack_value`) — consuming it twice would double-free.
///
/// # Safety
/// `sv.slot0` must be a valid `DISC_*` tag and the payload slots must hold
/// live pointers created by `value_to_stack_value` / `clone_stack_value`.
/// Panics on an unknown discriminant.
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                // Reassemble the string from its raw parts; ownership of the
                // buffer moves back into the SeqString.
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Takes over the refcount held by the slot.
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // slot2 holds a Box<Arc<[Value]>>; unwrap the box to get the Arc.
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}
172
173#[unsafe(no_mangle)]
180pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
181 unsafe {
182 let sv = &*src;
183 let cloned = clone_stack_value(sv);
184 *dst = cloned;
185 }
186}
187
188#[inline]
203pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
204 unsafe {
205 match sv.slot0 {
206 DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
207 DISC_STRING => {
208 let ptr = sv.slot1 as *const u8;
210 let len = sv.slot2 as usize;
211 debug_assert!(!ptr.is_null(), "String pointer is null");
212 let slice = std::slice::from_raw_parts(ptr, len);
214 #[cfg(debug_assertions)]
216 let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
217 #[cfg(not(debug_assertions))]
218 let s = std::str::from_utf8_unchecked(slice);
219 let cloned = crate::seqstring::global_string(s.to_string());
221 let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
222 StackValue {
223 slot0: DISC_STRING,
224 slot1: new_ptr as u64,
225 slot2: new_len as u64,
226 slot3: new_cap as u64,
227 slot4: if new_global { 1 } else { 0 },
228 }
229 }
230 DISC_VARIANT => {
231 use crate::value::VariantData;
232 let ptr = sv.slot1 as *const VariantData;
233 debug_assert!(!ptr.is_null(), "Variant pointer is null");
234 debug_assert!(
235 (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
236 "Variant pointer is misaligned"
237 );
238 let arc = Arc::from_raw(ptr);
239 let cloned = Arc::clone(&arc);
240 std::mem::forget(arc);
241 StackValue {
242 slot0: DISC_VARIANT,
243 slot1: Arc::into_raw(cloned) as u64,
244 slot2: 0,
245 slot3: 0,
246 slot4: 0,
247 }
248 }
249 DISC_MAP => {
250 use crate::value::MapKey;
252 use std::collections::HashMap;
253 let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
254 debug_assert!(!ptr.is_null(), "Map pointer is null");
255 debug_assert!(
256 (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
257 "Map pointer is misaligned"
258 );
259 let boxed = Box::from_raw(ptr);
260 let cloned = boxed.clone();
261 std::mem::forget(boxed);
262 StackValue {
263 slot0: DISC_MAP,
264 slot1: Box::into_raw(cloned) as u64,
265 slot2: 0,
266 slot3: 0,
267 slot4: 0,
268 }
269 }
270 DISC_CLOSURE => {
271 let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
273 debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
274 debug_assert!(
275 (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
276 "Closure env pointer is misaligned"
277 );
278 let env_arc = &*env_box_ptr;
279 let cloned_env = Arc::clone(env_arc);
280 let new_env_box = Box::new(cloned_env);
282 StackValue {
283 slot0: DISC_CLOSURE,
284 slot1: sv.slot1,
285 slot2: Box::into_raw(new_env_box) as u64,
286 slot3: 0,
287 slot4: 0,
288 }
289 }
290 DISC_CHANNEL => {
291 use crate::value::ChannelData;
293 let ptr = sv.slot1 as *const ChannelData;
294 debug_assert!(!ptr.is_null(), "Channel pointer is null");
295 let arc = Arc::from_raw(ptr);
296 let cloned = Arc::clone(&arc);
297 std::mem::forget(arc);
298 StackValue {
299 slot0: DISC_CHANNEL,
300 slot1: Arc::into_raw(cloned) as u64,
301 slot2: 0,
302 slot3: 0,
303 slot4: 0,
304 }
305 }
306 _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
307 }
308 }
309}
310
/// Releases the heap payload (if any) owned by a tagged stack value.
///
/// Consumes the slot's ownership: strings are reassembled and dropped,
/// `Arc` refcounts are decremented, `Box`es are freed. Scalars are no-ops.
///
/// # Safety
/// `sv` must hold a valid discriminant and live payload pointers, and must
/// not be used again afterwards (double-drop would double-free).
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            // No heap payload — nothing to release.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
            }
            DISC_STRING => {
                use crate::seqstring::SeqString;
                // Rebuild the SeqString and let its destructor free the buffer.
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Dropping the rebuilt Arc decrements the refcount.
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Drops the Box and, through it, the env Arc it contains.
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}
353
354#[inline]
365pub unsafe fn push(stack: Stack, value: Value) -> Stack {
366 unsafe {
367 let sv = value_to_stack_value(value);
368 *stack = sv;
369 stack.add(1)
370 }
371}
372
373#[inline]
378pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
379 unsafe {
380 *stack = sv;
381 stack.add(1)
382 }
383}
384
385#[inline]
392pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
393 unsafe {
394 let new_sp = stack.sub(1);
395 let sv = *new_sp;
396 (new_sp, stack_value_to_value(sv))
397 }
398}
399
400#[inline]
405pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
406 unsafe {
407 let new_sp = stack.sub(1);
408 let sv = *new_sp;
409 (new_sp, sv)
410 }
411}
412
413#[inline]
421pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
422 unsafe {
423 let (sp, b) = pop(stack);
424 let (sp, a) = pop(sp);
425 (sp, a, b)
426 }
427}
428
429#[inline]
434pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
435 unsafe {
436 let (sp, c) = pop(stack);
437 let (sp, b) = pop(sp);
438 let (sp, a) = pop(sp);
439 (sp, a, b, c)
440 }
441}
442
443#[inline]
448pub unsafe fn peek(stack: Stack) -> Value {
449 unsafe {
450 let sv = *stack.sub(1);
451 stack_value_to_value(clone_stack_value(&sv))
453 }
454}
455
456#[inline]
461pub unsafe fn peek_sv(stack: Stack) -> StackValue {
462 unsafe { *stack.sub(1) }
463}
464
/// Stub: always reports the stack as non-empty.
///
/// NOTE(review): a bare stack pointer carries no depth information; real
/// emptiness checks presumably compare against `get_stack_base()` (as
/// `pick`/`roll`/`dump` do) — confirm whether any caller relies on this.
#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    false
}
473
474#[unsafe(no_mangle)]
483pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
484 unsafe {
485 let sv = peek_sv(stack);
486 let cloned = clone_stack_value(&sv);
487 push_sv(stack, cloned)
488 }
489}
490
491#[inline]
496pub unsafe fn drop_top(stack: Stack) -> Stack {
497 unsafe {
498 let (new_sp, sv) = pop_sv(stack);
499 drop_stack_value(sv);
500 new_sp
501 }
502}
503
/// FFI stack op `drop`: ( a -- ). Thin extern "C" wrapper around [`drop_top`].
///
/// # Safety
/// The stack must contain at least one valid value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
512
// Passing `Value` (a non-FFI-safe Rust enum) across extern "C" is deliberate:
// the only callers are JIT-generated code built against this exact layout.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
/// FFI helper: converts `value` to its tagged form and pushes it.
///
/// # Safety
/// `stack` must be valid for a write of one `StackValue`.
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
522
523#[unsafe(no_mangle)]
528pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
529 unsafe {
530 let ptr_b = stack.sub(1);
531 let ptr_a = stack.sub(2);
532 let a = *ptr_a;
533 let b = *ptr_b;
534 *ptr_a = b;
535 *ptr_b = a;
536 stack
537 }
538}
539
540#[unsafe(no_mangle)]
545pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
546 unsafe {
547 let sv_a = *stack.sub(2);
548 let cloned = clone_stack_value(&sv_a);
549 push_sv(stack, cloned)
550 }
551}
552
553#[unsafe(no_mangle)]
558pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
559 unsafe {
560 let ptr_c = stack.sub(1);
561 let ptr_b = stack.sub(2);
562 let ptr_a = stack.sub(3);
563 let a = *ptr_a;
564 let b = *ptr_b;
565 let c = *ptr_c;
566 *ptr_a = b;
567 *ptr_b = c;
568 *ptr_c = a;
569 stack
570 }
571}
572
573#[unsafe(no_mangle)]
578pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
579 unsafe {
580 let ptr_b = stack.sub(1);
581 let ptr_a = stack.sub(2);
582 let a = *ptr_a;
583 let b = *ptr_b;
584 drop_stack_value(a);
585 *ptr_a = b;
586 stack.sub(1)
587 }
588}
589
590#[unsafe(no_mangle)]
595pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
596 unsafe {
597 let ptr_b = stack.sub(1);
598 let ptr_a = stack.sub(2);
599 let a = *ptr_a;
600 let b = *ptr_b;
601 let b_clone = clone_stack_value(&b);
602 *ptr_a = b;
603 *ptr_b = a;
604 push_sv(stack, b_clone)
605 }
606}
607
608#[unsafe(no_mangle)]
613pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
614 unsafe {
615 let sv_a = *stack.sub(2);
616 let sv_b = *stack.sub(1);
617 let a_clone = clone_stack_value(&sv_a);
618 let b_clone = clone_stack_value(&sv_b);
619 let sp = push_sv(stack, a_clone);
620 push_sv(sp, b_clone)
621 }
622}
623
624#[unsafe(no_mangle)]
629pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
630 unsafe {
631 let (sp, sv_c) = pop_sv(stack);
632 let (sp, sv_b) = pop_sv(sp);
633 let (sp, sv_a) = pop_sv(sp);
634 drop_stack_value(sv_c);
635 drop_stack_value(sv_b);
636 drop_stack_value(sv_a);
637 sp
638 }
639}
640
/// FFI stack op `pick`: ( ... n -- ... x ) where `x` is a deep clone of the
/// value `n` slots below the (post-pop) top. `0 pick` is `dup`, `1 pick` is
/// `over`.
///
/// Panics if the popped index is not an Int, is negative, or exceeds the
/// current stack depth (measured against `get_stack_base()`).
///
/// # Safety
/// The stack base must have been registered via `patch_seq_set_stack_base`,
/// and the slots in range must hold valid values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        // The index itself is popped off the stack first.
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("pick: expected Int"),
        };

        if n_raw < 0 {
            panic!("pick: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        // Depth is derived from the coroutine-local stack base.
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        // Clone (not move) the picked slot so the original stays live.
        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}
685
/// FFI stack op `roll`: ( ... n -- ... ) moves the value `n` slots below the
/// (post-pop) top up to the top, shifting the intervening values down by one.
/// `1 roll` is `swap`, `2 roll` is `rot`; `0 roll` is a no-op.
///
/// Panics if the popped index is not an Int, is negative, or exceeds the
/// current stack depth (measured against `get_stack_base()`).
///
/// # Safety
/// The stack base must have been registered via `patch_seq_set_stack_base`,
/// and the slots in range must hold valid values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        // The index itself is popped off the stack first.
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("roll: expected Int"),
        };

        if n_raw < 0 {
            panic!("roll: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        // Fast paths for the common small cases.
        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        // Save the value being rolled, then shift the n values above it down
        // one slot. The regions overlap, so use the memmove-style `copy`.
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        // Ownership of `saved` moves to the top slot — a plain bit copy, no clone.
        *sp.sub(1) = saved;

        sp
    }
}
747
748pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
755 unsafe {
756 for i in 0..count {
757 let sv = *src.sub(count - i);
758 let cloned = clone_stack_value(&sv);
759 *dst.add(i) = cloned;
760 }
761 }
762}
763
use std::cell::Cell;

// Base address of the current coroutine's value stack, stored per-coroutine
// (via the `may` runtime) so depth checks in pick/roll/dump and stack cloning
// can compute how many values are live. 0 means "not set".
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
776
777#[unsafe(no_mangle)]
782pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
783 STACK_BASE.with(|cell| {
784 cell.set(base as usize);
785 });
786}
787
/// Returns the current coroutine's registered stack base, or a null pointer
/// if `patch_seq_set_stack_base` was never called on this coroutine.
#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}
793
794#[unsafe(no_mangle)]
803pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
804 unsafe {
805 let (new_sp, _base) = clone_stack_with_base(sp);
806 new_sp
807 }
808}
809
810pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
818 let base = get_stack_base();
819 if base.is_null() {
820 panic!("clone_stack: stack base not set");
821 }
822
823 let depth = unsafe { sp.offset_from(base) as usize };
825
826 if depth == 0 {
827 use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
829 let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
830 let new_base = new_stack.base;
831 std::mem::forget(new_stack); return (new_base, new_base);
833 }
834
835 use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
837 let capacity = depth.max(DEFAULT_STACK_CAPACITY);
838 let new_stack = TaggedStack::new(capacity);
839 let new_base = new_stack.base;
840 std::mem::forget(new_stack); unsafe {
844 for i in 0..depth {
845 let sv = &*base.add(i);
846 let cloned = clone_stack_value(sv);
847 *new_base.add(i) = cloned;
848 }
849 }
850
851 unsafe { (new_base.add(depth), new_base) }
853}
854
// Short, Rust-friendly aliases for the `patch_seq_*` FFI entry points so
// in-crate callers don't have to use the exported symbol names.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_3drop as three_drop;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
869
870pub fn alloc_stack() -> Stack {
882 use crate::tagged_stack::TaggedStack;
883 let stack = TaggedStack::with_default_capacity();
884 let base = stack.base;
885 std::mem::forget(stack); base
887}
888
/// Test helper: allocates a stack and registers its base for the current
/// coroutine so depth-checked ops (`pick`, `roll`, `stack.dump`) work.
#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let base = alloc_stack();
    unsafe { patch_seq_set_stack_base(base) };
    base
}
897
/// FFI stack op `stack.dump`: prints the whole stack bottom-to-top as
/// `[v0, v1, ...]`, then releases every value and returns the base pointer —
/// i.e. dumping CLEARS the stack. (NOTE(review): destructive dump appears
/// intentional since `base` is returned; confirm against the language spec.)
///
/// If no base was registered, prints a diagnostic and leaves `sp` unchanged.
///
/// # Safety
/// All slots in `base..sp` must hold valid values; none may be used after.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("[]");
    } else {
        print!("[");
        for i in 0..depth {
            if i > 0 {
                print!(", ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!("]");

        // Release every printed value; the stack is reset to empty below.
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}
944
945fn print_stack_value(sv: &StackValue) {
952 match sv.slot0 {
953 DISC_INT => print!("{}", sv.slot1 as i64),
954 DISC_FLOAT => print!("{}", f64::from_bits(sv.slot1)),
955 DISC_BOOL => print!("{}", if sv.slot1 != 0 { "true" } else { "false" }),
956 DISC_STRING => {
957 let ptr = sv.slot1 as *const u8;
958 let len = sv.slot2 as usize;
959 if ptr.is_null() || len == 0 {
963 print!("\"\"");
964 } else if len > 10_000_000 {
965 print!("<string:invalid length {}>", len);
967 } else {
968 unsafe {
969 let slice = std::slice::from_raw_parts(ptr, len);
970 if let Ok(s) = std::str::from_utf8(slice) {
971 print!("\"{}\"", s);
972 } else {
973 print!("<string:{} bytes, non-utf8>", len);
974 }
975 }
976 }
977 }
978 DISC_VARIANT => print!("<variant>"),
979 DISC_MAP => print!("<map>"),
980 DISC_QUOTATION => print!("<quotation>"),
981 DISC_CLOSURE => print!("<closure>"),
982 DISC_CHANNEL => print!("<channel>"),
983 _ => print!("<unknown:{}>", sv.slot0),
984 }
985}
986
/// Expands to a raw `Stack` pointer backed by a fixed 256-slot static buffer,
/// for tests that need a scratch stack without allocation.
///
/// All-zero bytes form a valid `StackValue` (slot0 == 0 == `DISC_INT`), so
/// zeroed initialization is sound.
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Use `&raw mut` instead of `BUFFER.as_mut_ptr()`: the latter creates
        // a reference to a `static mut`, which the `static_mut_refs` lint
        // rejects in the 2024 edition this crate uses.
        (&raw mut BUFFER) as *mut StackValue
    }};
}