use crate::tagged_stack::StackValue;
use crate::value::Value;
use std::sync::Arc;

pub type Stack = *mut StackValue;

pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;

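/// Converts an owned [`Value`] into its flattened [`StackValue`] representation:
/// `slot0` holds the `DISC_*` discriminant and `slot1..slot4` hold the payload
/// (raw bits, raw pointers, or raw string parts). Heap-backed values are leaked
/// into raw pointers here and must later be reclaimed via `stack_value_to_value`
/// or `drop_stack_value`.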
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Symbol(s) => {
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_SYMBOL,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}

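/// Rebuilds an owned [`Value`] from a [`StackValue`], reversing `value_to_stack_value`.
///
/// # Safety
/// `sv` must have been produced by `value_to_stack_value` (or an equivalent encoding)
/// and must not be reused afterwards: ownership of any heap payload transfers back to
/// the returned `Value`.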
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::Symbol(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}

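/// Exported C-ABI entry point: deep-clones the [`StackValue`] at `src` into `dst`
/// without consuming the source (see `clone_stack_value`).
///
/// # Safety
/// Both pointers must be non-null and properly aligned; `dst` is overwritten without
/// dropping its previous contents.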
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    unsafe {
        let sv = &*src;
        let cloned = clone_stack_value(sv);
        *dst = cloned;
    }
}

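/// Deep-clones a [`StackValue`] without consuming the original.
///
/// Trivial payloads (`Int`, `Float`, `Bool`, `Quotation`) are copied bit-for-bit;
/// strings and symbols are re-allocated (interned symbols reuse their pointer);
/// `Arc`-backed payloads get their reference count bumped; maps and closure
/// environments are cloned into fresh heap allocations.
///
/// # Safety
/// `sv` must be a valid encoded value whose pointers are non-null and properly aligned.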
#[inline]
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                let slice = std::slice::from_raw_parts(ptr, len);
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_SYMBOL => {
                let capacity = sv.slot3 as usize;
                let is_global = sv.slot4 != 0;

                if capacity == 0 && is_global {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;

                    debug_assert!(
                        !ptr.is_null(),
                        "Interned symbol has null pointer in clone fast path"
                    );

                    let seq_str = crate::seqstring::SeqString::from_raw_parts(ptr, len, 0, true);
                    let (new_ptr, new_len, new_cap, new_global) = seq_str.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                } else {
                    let ptr = sv.slot1 as *const u8;
                    let len = sv.slot2 as usize;
                    debug_assert!(!ptr.is_null(), "Symbol pointer is null");
                    let slice = std::slice::from_raw_parts(ptr, len);
                    #[cfg(debug_assertions)]
                    let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in symbol clone");
                    #[cfg(not(debug_assertions))]
                    let s = std::str::from_utf8_unchecked(slice);
                    let cloned = crate::seqstring::global_string(s.to_string());
                    let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                    StackValue {
                        slot0: DISC_SYMBOL,
                        slot1: new_ptr as u64,
                        slot2: new_len as u64,
                        slot3: new_cap as u64,
                        slot4: if new_global { 1 } else { 0 },
                    }
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}

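/// Releases whatever heap resources a [`StackValue`] owns by reconstructing the
/// owning type (`SeqString`, `Arc`, `Box`) and letting it drop. Trivial payloads
/// are a no-op.
///
/// # Safety
/// `sv` must be a valid encoded value that is not used again afterwards; dropping
/// the same encoded value twice is a double free.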
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {}
            DISC_STRING => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            DISC_SYMBOL => {
                use crate::seqstring::SeqString;
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}

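/// Writes `value` at the current stack pointer and returns the bumped pointer.
/// `stack` must point at the next free slot, with capacity for at least one more
/// value. A minimal usage sketch (`ignore`d, since it assumes a live runtime stack):
///
/// ```ignore
/// let sp = alloc_stack();
/// let sp = unsafe { push(sp, Value::Int(42)) };
/// let (sp, v) = unsafe { pop(sp) };
/// ```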
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        let sv = value_to_stack_value(value);
        *stack = sv;
        stack.add(1)
    }
}

#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        *stack = sv;
        stack.add(1)
    }
}

#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, stack_value_to_value(sv))
    }
}

#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let new_sp = stack.sub(1);
        let sv = *new_sp;
        (new_sp, sv)
    }
}

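/// Pops two values, returning them in pushed order: for a stack `.. a b`, the
/// result is `(sp, a, b)`. The `_op_name` parameter is currently unused.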
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (sp, b) = pop(stack);
        let (sp, a) = pop(sp);
        (sp, a, b)
    }
}

#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (sp, c) = pop(stack);
        let (sp, b) = pop(sp);
        let (sp, a) = pop(sp);
        (sp, a, b, c)
    }
}

#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let sv = *stack.sub(1);
        stack_value_to_value(clone_stack_value(&sv))
    }
}

#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { *stack.sub(1) }
}

#[inline]
pub fn is_empty(_stack: Stack) -> bool {
    // Emptiness cannot be judged from the stack pointer alone (the base is stored
    // separately), so this conservatively reports a non-empty stack.
    false
}

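/// `dup` — duplicates the top of stack with a deep clone: `( a -- a a )`.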
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let sv = peek_sv(stack);
        let cloned = clone_stack_value(&sv);
        push_sv(stack, cloned)
    }
}

#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (new_sp, sv) = pop_sv(stack);
        drop_stack_value(sv);
        new_sp
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}

#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        *ptr_a = b;
        *ptr_b = a;
        stack
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let cloned = clone_stack_value(&sv_a);
        push_sv(stack, cloned)
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let ptr_c = stack.sub(1);
        let ptr_b = stack.sub(2);
        let ptr_a = stack.sub(3);
        let a = *ptr_a;
        let b = *ptr_b;
        let c = *ptr_c;
        *ptr_a = b;
        *ptr_b = c;
        *ptr_c = a;
        stack
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        drop_stack_value(a);
        *ptr_a = b;
        stack.sub(1)
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let ptr_b = stack.sub(1);
        let ptr_a = stack.sub(2);
        let a = *ptr_a;
        let b = *ptr_b;
        let b_clone = clone_stack_value(&b);
        *ptr_a = b;
        *ptr_b = a;
        push_sv(stack, b_clone)
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let sv_a = *stack.sub(2);
        let sv_b = *stack.sub(1);
        let a_clone = clone_stack_value(&sv_a);
        let b_clone = clone_stack_value(&sv_b);
        let sp = push_sv(stack, a_clone);
        push_sv(sp, b_clone)
    }
}

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
    unsafe {
        let (sp, sv_c) = pop_sv(stack);
        let (sp, sv_b) = pop_sv(sp);
        let (sp, sv_a) = pop_sv(sp);
        drop_stack_value(sv_c);
        drop_stack_value(sv_b);
        drop_stack_value(sv_a);
        sp
    }
}

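/// `pick` — pops an index `n` and pushes a deep clone of the value `n` slots below
/// the (new) top: `( xn .. x0 n -- xn .. x0 xn )`. Panics if the index is not an
/// `Int`, is negative, or exceeds the current stack depth.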
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("pick: expected Int"),
        };

        if n_raw < 0 {
            panic!("pick: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}

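/// `roll` — pops an index `n` and rotates the value `n` slots below the (new) top up
/// to the top, shifting the values above it down by one:
/// `( xn .. x0 n -- xn-1 .. x0 xn )`. `0 roll` is a no-op, `1 roll` is `swap`, and
/// `2 roll` is `rot`.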
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("roll: expected Int"),
        };

        if n_raw < 0 {
            panic!("roll: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        *sp.sub(1) = saved;

        sp
    }
}

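/// Deep-clones the `count` values directly below `src` (the source stack pointer)
/// into the region starting at `dst`, preserving their order. Both pointers must be
/// valid for the corresponding reads and writes.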
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for i in 0..count {
            let sv = *src.sub(count - i);
            let cloned = clone_stack_value(&sv);
            *dst.add(i) = cloned;
        }
    }
}

use std::cell::Cell;

may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));

#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
    STACK_BASE.with(|cell| {
        cell.set(base as usize);
    });
}

#[inline]
pub fn get_stack_base() -> Stack {
    STACK_BASE.with(|cell| cell.get() as *mut StackValue)
}

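/// Exported C-ABI entry point: deep-clones the live stack (from the coroutine-local
/// base up to `sp`) into a freshly allocated stack and returns the new stack pointer.
/// See `clone_stack_with_base` for the variant that also returns the new base.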
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe {
        let (new_sp, _base) = clone_stack_with_base(sp);
        new_sp
    }
}

pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        // Leak the TaggedStack: ownership of the allocation moves to the raw base pointer.
        std::mem::forget(new_stack);
        return (new_base, new_base);
    }

    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    // Leak the TaggedStack: ownership of the allocation moves to the raw base pointer.
    std::mem::forget(new_stack);

    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    unsafe { (new_base.add(depth), new_base) }
}

pub use patch_seq_2dup as two_dup;
pub use patch_seq_3drop as three_drop;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;

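/// Allocates a fresh `TaggedStack` with the default capacity, leaks it, and returns
/// its base pointer (which doubles as the initial stack pointer of an empty stack).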
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let stack = TaggedStack::with_default_capacity();
    let base = stack.base;
    // Leak the TaggedStack: the runtime owns the allocation via the raw base pointer.
    std::mem::forget(stack);
    base
}

#[cfg(test)]
pub fn alloc_test_stack() -> Stack {
    let stack = alloc_stack();
    unsafe { patch_seq_set_stack_base(stack) };
    stack
}

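/// Exported C-ABI entry point for `stack.dump`: prints every value from the base up
/// to `sp` in compact SON form, then drops them all and returns the base (the dump
/// consumes the stack).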
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("stack:");
    } else {
        use std::io::Write;
        print!("stack: ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        let _ = std::io::stdout().flush();

        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}

fn print_stack_value(sv: &StackValue) {
    use crate::son::{SonConfig, value_to_son};

    let cloned = unsafe { clone_stack_value(sv) };
    let value = unsafe { stack_value_to_value(cloned) };
    let son = value_to_son(&value, &SonConfig::compact());
    print!("{}", son);
}

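/// Test helper macro: carves a 256-slot zeroed buffer out of a local `static mut` and
/// yields a raw [`StackValue`] pointer to use as a stack. Only suitable for tests that
/// stay within 256 values and do not need a registered stack base.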
#[macro_export]
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        // Take a raw pointer to the static buffer; taking `&mut` of a `static mut`
        // is rejected under the `static_mut_refs` rules in recent editions.
        unsafe { std::ptr::addr_of_mut!(BUFFER) as *mut StackValue }
    }};
}