1use crate::tagged_stack::StackValue;
11use crate::value::Value;
12use std::sync::Arc;
13
/// A raw stack pointer. By convention it points one slot PAST the current top
/// value, so `push` writes at `sp` and `pop` reads at `sp - 1`.
pub type Stack = *mut StackValue;

// Discriminant tags stored in `slot0` of a `StackValue`; they select how the
// payload slots (`slot1`..`slot4`) are interpreted (see `value_to_stack_value`).
pub const DISC_INT: u64 = 0; // slot1 = i64 bits
pub const DISC_FLOAT: u64 = 1; // slot1 = f64 bits (to_bits/from_bits)
pub const DISC_BOOL: u64 = 2; // slot1 = 0 or 1
pub const DISC_STRING: u64 = 3; // slot1 = buf ptr, slot2 = len, slot3 = capacity, slot4 = global flag
pub const DISC_VARIANT: u64 = 4; // slot1 = Arc<VariantData> raw ptr
pub const DISC_MAP: u64 = 5; // slot1 = Box<HashMap<MapKey, Value>> raw ptr
pub const DISC_QUOTATION: u64 = 6; // slot1 = wrapper (usize), slot2 = impl_ (usize)
pub const DISC_CLOSURE: u64 = 7; // slot1 = fn_ptr (usize), slot2 = Box<Arc<[Value]>> raw ptr
pub const DISC_CHANNEL: u64 = 8; // slot1 = Arc<ChannelData> raw ptr
pub const DISC_WEAVECTX: u64 = 9; // slot1/slot2 = yield/resume Arc<WeaveChannelData> raw ptrs
31
#[inline]
/// Flattens an owned `Value` into the 5-slot `StackValue` representation.
///
/// Heap-backed variants (String/Variant/Map/Closure/Channel/WeaveCtx) have
/// their ownership transferred into raw pointers stored in the slots, so the
/// result must eventually be consumed exactly once — by `stack_value_to_value`
/// or `drop_stack_value` — or the payload leaks.
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(i) => StackValue {
            slot0: DISC_INT,
            slot1: i as u64,
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Float(f) => StackValue {
            slot0: DISC_FLOAT,
            // Bit-preserving: round-trips NaN payloads and -0.0 exactly.
            slot1: f.to_bits(),
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::Bool(b) => StackValue {
            slot0: DISC_BOOL,
            slot1: if b { 1 } else { 0 },
            slot2: 0,
            slot3: 0,
            slot4: 0,
        },
        Value::String(s) => {
            // Ownership of the string buffer moves into the raw parts.
            let (ptr, len, capacity, global) = s.into_raw_parts();
            StackValue {
                slot0: DISC_STRING,
                slot1: ptr as u64,
                slot2: len as u64,
                slot3: capacity as u64,
                slot4: if global { 1 } else { 0 },
            }
        }
        Value::Variant(v) => {
            // Arc refcount is carried by the raw pointer until reclaimed.
            let ptr = Arc::into_raw(v) as u64;
            StackValue {
                slot0: DISC_VARIANT,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Map(m) => {
            let ptr = Box::into_raw(m) as u64;
            StackValue {
                slot0: DISC_MAP,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Quotation { wrapper, impl_ } => StackValue {
            slot0: DISC_QUOTATION,
            slot1: wrapper as u64,
            slot2: impl_ as u64,
            slot3: 0,
            slot4: 0,
        },
        Value::Closure { fn_ptr, env } => {
            // The env (Arc<[Value]>, a fat pointer) is boxed so it fits in a
            // single thin-pointer slot.
            let env_box = Box::new(env);
            let env_ptr = Box::into_raw(env_box) as u64;
            StackValue {
                slot0: DISC_CLOSURE,
                slot1: fn_ptr as u64,
                slot2: env_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::Channel(ch) => {
            let ptr = Arc::into_raw(ch) as u64;
            StackValue {
                slot0: DISC_CHANNEL,
                slot1: ptr,
                slot2: 0,
                slot3: 0,
                slot4: 0,
            }
        }
        Value::WeaveCtx {
            yield_chan,
            resume_chan,
        } => {
            // Two independent Arcs, one per slot.
            let yield_ptr = Arc::into_raw(yield_chan) as u64;
            let resume_ptr = Arc::into_raw(resume_chan) as u64;
            StackValue {
                slot0: DISC_WEAVECTX,
                slot1: yield_ptr,
                slot2: resume_ptr,
                slot3: 0,
                slot4: 0,
            }
        }
    }
}
136
#[inline]
/// Rebuilds an owned `Value` from its flattened `StackValue` form, reclaiming
/// ownership of any raw pointers stored in the slots.
///
/// # Safety
/// `sv` must have been produced by `value_to_stack_value` (or by
/// `clone_stack_value`) and must not be consumed again afterwards: converting
/// the same heap-backed `StackValue` twice double-frees.
/// Panics on an unknown discriminant.
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    unsafe {
        match sv.slot0 {
            DISC_INT => Value::Int(sv.slot1 as i64),
            DISC_FLOAT => Value::Float(f64::from_bits(sv.slot1)),
            DISC_BOOL => Value::Bool(sv.slot1 != 0),
            DISC_STRING => {
                use crate::seqstring::SeqString;
                // Reassemble the string from the raw parts stashed in the
                // slots; ownership of the buffer returns to the SeqString.
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                Value::String(SeqString::from_raw_parts(ptr, len, capacity, global))
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let arc = Arc::from_raw(sv.slot1 as *const VariantData);
                Value::Variant(arc)
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let boxed = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
                Value::Map(boxed)
            }
            DISC_QUOTATION => Value::Quotation {
                wrapper: sv.slot1 as usize,
                impl_: sv.slot2 as usize,
            },
            DISC_CLOSURE => {
                // Unbox the env (Box<Arc<[Value]>>) and move the Arc out.
                let env_box = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
                Value::Closure {
                    fn_ptr: sv.slot1 as usize,
                    env: *env_box,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let arc = Arc::from_raw(sv.slot1 as *const ChannelData);
                Value::Channel(arc)
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_chan = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let resume_chan = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
                Value::WeaveCtx {
                    yield_chan,
                    resume_chan,
                }
            }
            _ => panic!("Invalid discriminant: {}", sv.slot0),
        }
    }
}
197
198#[unsafe(no_mangle)]
205pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
206 unsafe {
207 let sv = &*src;
208 let cloned = clone_stack_value(sv);
209 *dst = cloned;
210 }
211}
212
#[inline]
/// Deep-clones a `StackValue` without consuming the original.
///
/// Scalars and quotations are plain `Copy`; strings get a freshly allocated
/// buffer; Arc-backed payloads (variant/channel/weavectx) have their refcount
/// bumped; maps and closure envs are re-boxed.
///
/// # Safety
/// `sv` must be a live `StackValue` produced by this module, with pointer
/// slots pointing at live allocations of the expected types.
/// Panics on an unknown discriminant.
pub unsafe fn clone_stack_value(sv: &StackValue) -> StackValue {
    unsafe {
        match sv.slot0 {
            // Value-only payloads: a bitwise copy is a full clone.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => *sv,
            DISC_STRING => {
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                debug_assert!(!ptr.is_null(), "String pointer is null");
                let slice = std::slice::from_raw_parts(ptr, len);
                // Debug builds validate UTF-8; release builds trust the
                // invariant that stack strings are always valid UTF-8.
                #[cfg(debug_assertions)]
                let s = std::str::from_utf8(slice).expect("Invalid UTF-8 in string clone");
                #[cfg(not(debug_assertions))]
                let s = std::str::from_utf8_unchecked(slice);
                let cloned = crate::seqstring::global_string(s.to_string());
                let (new_ptr, new_len, new_cap, new_global) = cloned.into_raw_parts();
                StackValue {
                    slot0: DISC_STRING,
                    slot1: new_ptr as u64,
                    slot2: new_len as u64,
                    slot3: new_cap as u64,
                    slot4: if new_global { 1 } else { 0 },
                }
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                let ptr = sv.slot1 as *const VariantData;
                debug_assert!(!ptr.is_null(), "Variant pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<VariantData>()),
                    "Variant pointer is misaligned"
                );
                // Rebuild the Arc just long enough to bump its refcount, then
                // forget it so the original's count is left untouched.
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_VARIANT,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let ptr = sv.slot1 as *mut HashMap<MapKey, Value>;
                debug_assert!(!ptr.is_null(), "Map pointer is null");
                debug_assert!(
                    (ptr as usize).is_multiple_of(std::mem::align_of::<HashMap<MapKey, Value>>()),
                    "Map pointer is misaligned"
                );
                // Same borrow-then-forget pattern: deep-clone the map without
                // taking ownership of the original box.
                let boxed = Box::from_raw(ptr);
                let cloned = boxed.clone();
                std::mem::forget(boxed);
                StackValue {
                    slot0: DISC_MAP,
                    slot1: Box::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CLOSURE => {
                let env_box_ptr = sv.slot2 as *mut Arc<[Value]>;
                debug_assert!(!env_box_ptr.is_null(), "Closure env pointer is null");
                debug_assert!(
                    (env_box_ptr as usize).is_multiple_of(std::mem::align_of::<Arc<[Value]>>()),
                    "Closure env pointer is misaligned"
                );
                // The env Arc is cloned (refcount bump); the fn_ptr is copied.
                let env_arc = &*env_box_ptr;
                let cloned_env = Arc::clone(env_arc);
                let new_env_box = Box::new(cloned_env);
                StackValue {
                    slot0: DISC_CLOSURE,
                    slot1: sv.slot1,
                    slot2: Box::into_raw(new_env_box) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let ptr = sv.slot1 as *const ChannelData;
                debug_assert!(!ptr.is_null(), "Channel pointer is null");
                let arc = Arc::from_raw(ptr);
                let cloned = Arc::clone(&arc);
                std::mem::forget(arc);
                StackValue {
                    slot0: DISC_CHANNEL,
                    slot1: Arc::into_raw(cloned) as u64,
                    slot2: 0,
                    slot3: 0,
                    slot4: 0,
                }
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let yield_ptr = sv.slot1 as *const WeaveChannelData;
                let resume_ptr = sv.slot2 as *const WeaveChannelData;
                debug_assert!(!yield_ptr.is_null(), "WeaveCtx yield pointer is null");
                debug_assert!(!resume_ptr.is_null(), "WeaveCtx resume pointer is null");
                let yield_arc = Arc::from_raw(yield_ptr);
                let resume_arc = Arc::from_raw(resume_ptr);
                let yield_cloned = Arc::clone(&yield_arc);
                let resume_cloned = Arc::clone(&resume_arc);
                std::mem::forget(yield_arc);
                std::mem::forget(resume_arc);
                StackValue {
                    slot0: DISC_WEAVECTX,
                    slot1: Arc::into_raw(yield_cloned) as u64,
                    slot2: Arc::into_raw(resume_cloned) as u64,
                    slot3: 0,
                    slot4: 0,
                }
            }
            _ => panic!("Invalid discriminant for clone: {}", sv.slot0),
        }
    }
}
356
#[inline]
/// Releases whatever heap resources a `StackValue` owns by reconstructing the
/// owning type and letting its `Drop` run. Scalars and quotations own nothing.
///
/// # Safety
/// `sv` must be a live `StackValue` produced by this module and must not be
/// used (or dropped) again afterwards. Panics on an unknown discriminant.
pub unsafe fn drop_stack_value(sv: StackValue) {
    unsafe {
        match sv.slot0 {
            // Nothing to free for value-only payloads.
            DISC_INT | DISC_FLOAT | DISC_BOOL | DISC_QUOTATION => {
            }
            DISC_STRING => {
                use crate::seqstring::SeqString;
                // Rebuild the SeqString and drop it immediately.
                let ptr = sv.slot1 as *const u8;
                let len = sv.slot2 as usize;
                let capacity = sv.slot3 as usize;
                let global = sv.slot4 != 0;
                let _ = SeqString::from_raw_parts(ptr, len, capacity, global);
            }
            DISC_VARIANT => {
                use crate::value::VariantData;
                // Reclaims one strong count; frees only if it was the last.
                let _ = Arc::from_raw(sv.slot1 as *const VariantData);
            }
            DISC_MAP => {
                use crate::value::MapKey;
                use std::collections::HashMap;
                let _ = Box::from_raw(sv.slot1 as *mut HashMap<MapKey, Value>);
            }
            DISC_CLOSURE => {
                // Drops the boxed env Arc; the fn_ptr in slot1 is just data.
                let _ = Box::from_raw(sv.slot2 as *mut Arc<[Value]>);
            }
            DISC_CHANNEL => {
                use crate::value::ChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const ChannelData);
            }
            DISC_WEAVECTX => {
                use crate::value::WeaveChannelData;
                let _ = Arc::from_raw(sv.slot1 as *const WeaveChannelData);
                let _ = Arc::from_raw(sv.slot2 as *const WeaveChannelData);
            }
            _ => panic!("Invalid discriminant for drop: {}", sv.slot0),
        }
    }
}
404
405#[inline]
416pub unsafe fn push(stack: Stack, value: Value) -> Stack {
417 unsafe {
418 let sv = value_to_stack_value(value);
419 *stack = sv;
420 stack.add(1)
421 }
422}
423
424#[inline]
429pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
430 unsafe {
431 *stack = sv;
432 stack.add(1)
433 }
434}
435
436#[inline]
443pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
444 unsafe {
445 let new_sp = stack.sub(1);
446 let sv = *new_sp;
447 (new_sp, stack_value_to_value(sv))
448 }
449}
450
451#[inline]
456pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
457 unsafe {
458 let new_sp = stack.sub(1);
459 let sv = *new_sp;
460 (new_sp, sv)
461 }
462}
463
464#[inline]
472pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
473 unsafe {
474 let (sp, b) = pop(stack);
475 let (sp, a) = pop(sp);
476 (sp, a, b)
477 }
478}
479
480#[inline]
485pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
486 unsafe {
487 let (sp, c) = pop(stack);
488 let (sp, b) = pop(sp);
489 let (sp, a) = pop(sp);
490 (sp, a, b, c)
491 }
492}
493
494#[inline]
499pub unsafe fn peek(stack: Stack) -> Value {
500 unsafe {
501 let sv = *stack.sub(1);
502 stack_value_to_value(clone_stack_value(&sv))
504 }
505}
506
507#[inline]
512pub unsafe fn peek_sv(stack: Stack) -> StackValue {
513 unsafe { *stack.sub(1) }
514}
515
#[inline]
/// Always reports `false`: a bare stack pointer carries no depth information
/// (a real check would require comparing against the coroutine-local base,
/// as `patch_seq_pick_op` does). NOTE(review): presumably kept only for
/// interface compatibility — confirm no caller relies on a true emptiness test.
pub fn is_empty(_stack: Stack) -> bool {
    false
}
524
525#[unsafe(no_mangle)]
534pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
535 unsafe {
536 let sv = peek_sv(stack);
537 let cloned = clone_stack_value(&sv);
538 push_sv(stack, cloned)
539 }
540}
541
542#[inline]
547pub unsafe fn drop_top(stack: Stack) -> Stack {
548 unsafe {
549 let (new_sp, sv) = pop_sv(stack);
550 drop_stack_value(sv);
551 new_sp
552 }
553}
554
#[unsafe(no_mangle)]
/// C-ABI entry point for the `drop` stack op: pops and frees the top value,
/// returning the new stack top. Thin shim over `drop_top`.
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
    unsafe { drop_top(stack) }
}
563
#[allow(improper_ctypes_definitions)]
// ^ `Value` is not FFI-safe; presumably only compiler-generated Rust code
//   calls this symbol, so the layout mismatch warning is suppressed — confirm.
#[unsafe(no_mangle)]
/// C-ABI entry point: pushes an owned `Value` and returns the new stack top.
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
    unsafe { push(stack, value) }
}
573
574#[unsafe(no_mangle)]
579pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
580 unsafe {
581 let ptr_b = stack.sub(1);
582 let ptr_a = stack.sub(2);
583 let a = *ptr_a;
584 let b = *ptr_b;
585 *ptr_a = b;
586 *ptr_b = a;
587 stack
588 }
589}
590
591#[unsafe(no_mangle)]
596pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
597 unsafe {
598 let sv_a = *stack.sub(2);
599 let cloned = clone_stack_value(&sv_a);
600 push_sv(stack, cloned)
601 }
602}
603
604#[unsafe(no_mangle)]
609pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
610 unsafe {
611 let ptr_c = stack.sub(1);
612 let ptr_b = stack.sub(2);
613 let ptr_a = stack.sub(3);
614 let a = *ptr_a;
615 let b = *ptr_b;
616 let c = *ptr_c;
617 *ptr_a = b;
618 *ptr_b = c;
619 *ptr_c = a;
620 stack
621 }
622}
623
624#[unsafe(no_mangle)]
629pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
630 unsafe {
631 let ptr_b = stack.sub(1);
632 let ptr_a = stack.sub(2);
633 let a = *ptr_a;
634 let b = *ptr_b;
635 drop_stack_value(a);
636 *ptr_a = b;
637 stack.sub(1)
638 }
639}
640
641#[unsafe(no_mangle)]
646pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
647 unsafe {
648 let ptr_b = stack.sub(1);
649 let ptr_a = stack.sub(2);
650 let a = *ptr_a;
651 let b = *ptr_b;
652 let b_clone = clone_stack_value(&b);
653 *ptr_a = b;
654 *ptr_b = a;
655 push_sv(stack, b_clone)
656 }
657}
658
659#[unsafe(no_mangle)]
664pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
665 unsafe {
666 let sv_a = *stack.sub(2);
667 let sv_b = *stack.sub(1);
668 let a_clone = clone_stack_value(&sv_a);
669 let b_clone = clone_stack_value(&sv_b);
670 let sp = push_sv(stack, a_clone);
671 push_sv(sp, b_clone)
672 }
673}
674
675#[unsafe(no_mangle)]
680pub unsafe extern "C" fn patch_seq_3drop(stack: Stack) -> Stack {
681 unsafe {
682 let (sp, sv_c) = pop_sv(stack);
683 let (sp, sv_b) = pop_sv(sp);
684 let (sp, sv_a) = pop_sv(sp);
685 drop_stack_value(sv_c);
686 drop_stack_value(sv_b);
687 drop_stack_value(sv_a);
688 sp
689 }
690}
691
#[unsafe(no_mangle)]
/// C-ABI stack op `pick` ( ... n -- ... x ): pops an index `n`, then pushes a
/// deep clone of the value `n` slots below the (new) top. `0 pick` == `dup`.
///
/// Panics if the operand is not an `Int`, is negative, or exceeds the stack
/// depth (computed against the coroutine-local base).
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        // Pop the index operand first; all depth math is relative to `sp`.
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("pick: expected Int"),
        };

        if n_raw < 0 {
            panic!("pick: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        // Bounds-check against the real depth so we never read below base.
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        // Clone (not move) so the original slot stays valid.
        let sv = *sp.sub(n + 1);
        let cloned = clone_stack_value(&sv);
        push_sv(sp, cloned)
    }
}
736
#[unsafe(no_mangle)]
/// C-ABI stack op `roll` ( ... n -- ... ): pops an index `n` and moves the
/// value `n` slots below the (new) top up to the top, shifting the values in
/// between down one slot. `0 roll` is a no-op, `1 roll` == `swap`,
/// `2 roll` == `rot`. No values are cloned or dropped — only moved.
///
/// Panics if the operand is not an `Int`, is negative, or (for n >= 3)
/// exceeds the stack depth.
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        let (sp, n_val) = pop(stack);
        let n_raw = match n_val {
            Value::Int(i) => i,
            _ => panic!("roll: expected Int"),
        };

        if n_raw < 0 {
            panic!("roll: index cannot be negative (got {})", n_raw);
        }
        let n = n_raw as usize;

        // Small cases delegate to the dedicated, cheaper ops.
        if n == 0 {
            return sp;
        }
        if n == 1 {
            return patch_seq_swap(sp);
        }
        if n == 2 {
            return patch_seq_rot(sp);
        }

        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            panic!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            );
        }

        // Save the rolled value, shift the n values above it down one slot
        // (regions overlap, hence ptr::copy), then write it on top.
        let src_ptr = sp.sub(n + 1);
        let saved = *src_ptr;

        std::ptr::copy(src_ptr.add(1), src_ptr, n);

        *sp.sub(1) = saved;

        sp
    }
}
798
799pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
806 unsafe {
807 for i in 0..count {
808 let sv = *src.sub(count - i);
809 let cloned = clone_stack_value(&sv);
810 *dst.add(i) = cloned;
811 }
812 }
813}
814
use std::cell::Cell;

// Per-coroutine storage (via the `may` runtime) for the base pointer of the
// active stack, kept as a usize; 0 means "not set". Written by
// `patch_seq_set_stack_base`, read by `get_stack_base` for depth checks.
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
827
828#[unsafe(no_mangle)]
833pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
834 STACK_BASE.with(|cell| {
835 cell.set(base as usize);
836 });
837}
838
839#[inline]
841pub fn get_stack_base() -> Stack {
842 STACK_BASE.with(|cell| cell.get() as *mut StackValue)
843}
844
845#[unsafe(no_mangle)]
854pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
855 unsafe {
856 let (new_sp, _base) = clone_stack_with_base(sp);
857 new_sp
858 }
859}
860
/// Deep-clones the current coroutine's stack — everything from the registered
/// base up to (but excluding) `sp` — into a freshly allocated backing buffer.
/// Returns `(new_sp, new_base)` for the copy. The new buffer is intentionally
/// leaked via `mem::forget`: ownership passes to the caller as raw pointers.
///
/// # Safety
/// `sp` must point into the same allocation as the registered base, at or
/// above it. Panics if the base was never set.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }

    // Number of live values between base and sp.
    let depth = unsafe { sp.offset_from(base) as usize };

    if depth == 0 {
        // Empty stack: still hand back a fresh default-capacity buffer so the
        // caller gets an independent stack.
        use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
        let new_stack = TaggedStack::new(DEFAULT_STACK_CAPACITY);
        let new_base = new_stack.base;
        // Deliberate leak: the buffer now lives behind the raw pointer.
        std::mem::forget(new_stack);
        return (new_base, new_base);
    }

    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    // Never allocate smaller than the default so the clone has room to grow.
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    // Deliberate leak, as above.
    std::mem::forget(new_stack);
    unsafe {
        for i in 0..depth {
            let sv = &*base.add(i);
            let cloned = clone_stack_value(sv);
            *new_base.add(i) = cloned;
        }
    }

    unsafe { (new_base.add(depth), new_base) }
}
905
906pub use patch_seq_2dup as two_dup;
911pub use patch_seq_3drop as three_drop;
912pub use patch_seq_dup as dup;
913pub use patch_seq_nip as nip;
914pub use patch_seq_over as over;
915pub use patch_seq_pick_op as pick;
916pub use patch_seq_roll as roll;
917pub use patch_seq_rot as rot;
918pub use patch_seq_swap as swap;
919pub use patch_seq_tuck as tuck;
920
921pub fn alloc_stack() -> Stack {
933 use crate::tagged_stack::TaggedStack;
934 let stack = TaggedStack::with_default_capacity();
935 let base = stack.base;
936 std::mem::forget(stack); base
938}
939
#[cfg(test)]
/// Test helper: allocates a stack and registers its base with the current
/// coroutine so depth-checked ops (`pick`, `roll`, `stack.dump`) work.
pub fn alloc_test_stack() -> Stack {
    let base = alloc_stack();
    unsafe { patch_seq_set_stack_base(base) };
    base
}
948
#[unsafe(no_mangle)]
/// C-ABI stack op `stack.dump`: prints every value from bottom to top, then
/// frees all of them and returns the base pointer — i.e. dumping CLEARS the
/// stack. If no base was registered, prints a diagnostic to stderr and leaves
/// the stack untouched.
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }

    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();

    if depth == 0 {
        println!("stack:");
    } else {
        use std::io::Write;
        print!("stack: ");
        for i in 0..depth {
            if i > 0 {
                print!(" ");
            }
            unsafe {
                // Bitwise copy only — ownership stays with the slot until the
                // drop loop below.
                let sv = *base.add(i);
                print_stack_value(&sv);
            }
        }
        println!();
        // Flush so output is visible even if the process stops right after.
        let _ = std::io::stdout().flush();

        // Release every value we just printed; returning `base` below resets
        // the stack to empty.
        for i in 0..depth {
            unsafe {
                let sv = *base.add(i);
                drop_stack_value(sv);
            }
        }
    }

    base
}
999
1000fn print_stack_value(sv: &StackValue) {
1007 match sv.slot0 {
1008 DISC_INT => print!("{}", sv.slot1 as i64),
1009 DISC_FLOAT => {
1010 let f = f64::from_bits(sv.slot1);
1011 if f.fract() == 0.0 && f.is_finite() {
1012 print!("{}.0", f)
1013 } else {
1014 print!("{}", f)
1015 }
1016 }
1017 DISC_BOOL => print!("{}", if sv.slot1 != 0 { "true" } else { "false" }),
1018 DISC_STRING => {
1019 let ptr = sv.slot1 as *const u8;
1020 let len = sv.slot2 as usize;
1021 if ptr.is_null() || len == 0 {
1025 print!("\"\"");
1026 } else if len > 10_000_000 {
1027 print!("<string:invalid length {}>", len);
1029 } else {
1030 unsafe {
1031 let slice = std::slice::from_raw_parts(ptr, len);
1032 if let Ok(s) = std::str::from_utf8(slice) {
1033 print!("\"{}\"", s);
1034 } else {
1035 print!("<string:{} bytes, non-utf8>", len);
1036 }
1037 }
1038 }
1039 }
1040 DISC_VARIANT => print!("<variant>"),
1041 DISC_MAP => print!("<map>"),
1042 DISC_QUOTATION => print!("<quotation>"),
1043 DISC_CLOSURE => print!("<closure>"),
1044 DISC_CHANNEL => print!("<channel>"),
1045 _ => print!("<unknown:{}>", sv.slot0),
1046 }
1047}
1048
#[macro_export]
/// Allocates a fixed 256-slot, zero-initialized stack buffer for tests and
/// yields its base pointer (usable as a `Stack`). Each expansion gets its own
/// `static mut` buffer, so repeated runtime evaluation of the same call site
/// reuses one buffer.
///
/// Fix: takes the buffer address with `addr_of_mut!` instead of calling
/// `BUFFER.as_mut_ptr()`, which autorefs the `static mut` — a hard error
/// under the edition-2024 `static_mut_refs` rule (and a deny-level lint
/// otherwise).
macro_rules! test_stack {
    () => {{
        use $crate::tagged_stack::StackValue;
        // All-zero StackValues decode as DISC_INT(0), so zeroed memory is a
        // valid initial state for the buffer.
        static mut BUFFER: [StackValue; 256] = unsafe { std::mem::zeroed() };
        unsafe { std::ptr::addr_of_mut!(BUFFER).cast::<StackValue>() }
    }};
}