1use crate::context::VMContext;
2use crate::gc::frame_walker::{self, StackRoot};
3use crate::layout;
4use crate::stack_map::StackMapRegistry;
5use std::cell::{Cell, RefCell};
6use std::fmt;
7use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
8use tidepool_heap::layout as heap_layout;
9
/// Signature of the optional test hook invoked with the root set after each GC.
type GcHook = fn(&[StackRoot]);

/// Addresses below this are treated as obviously invalid (null page / poison values).
const MIN_VALID_ADDR: u64 = 0x1000;
14
/// Errors raised by JIT'd code or the runtime support routines; recorded in a
/// thread-local slot and retrieved by the host via `take_runtime_error`.
#[derive(Debug, Clone)]
pub enum RuntimeError {
    /// Integer division by zero.
    DivisionByZero,
    /// Arithmetic overflow.
    Overflow,
    /// Haskell `error` was called (no message captured).
    UserError,
    /// Haskell `undefined` was forced.
    Undefined,
    /// Type metadata was forced — expected to be dead code.
    TypeMetadata,
    /// An external variable was never resolved; payload is the packed VarId
    /// (ASCII tag in the top byte, 56-bit key below).
    UnresolvedVar(u64),
    /// A null function pointer was applied.
    NullFunPtr,
    /// A non-closure object was applied; payload is its heap tag byte.
    BadFunPtrTag(u8),
    /// Nursery still exhausted after a collection.
    HeapOverflow,
    /// Call-depth limit exceeded (see `MAX_CALL_DEPTH`).
    StackOverflow,
    /// A thunk forced itself (infinite loop).
    BlackHole,
    /// A thunk header held an unknown state byte.
    BadThunkState(u8),
    /// Haskell `error` was called with this message.
    UserErrorMsg(String),
}
32
33impl std::fmt::Display for RuntimeError {
34 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
35 match self {
36 RuntimeError::DivisionByZero => write!(f, "division by zero"),
37 RuntimeError::Overflow => write!(f, "arithmetic overflow"),
38 RuntimeError::UserError => write!(f, "Haskell error called"),
39 RuntimeError::UserErrorMsg(msg) => write!(f, "Haskell error: {}", msg),
40 RuntimeError::Undefined => write!(f, "Haskell undefined forced"),
41 RuntimeError::TypeMetadata => write!(f, "forced type metadata (should be dead code)"),
42 RuntimeError::UnresolvedVar(id) => {
43 let tag_char = (*id >> 56) as u8 as char;
44 let key = *id & ((1u64 << 56) - 1);
45 write!(
46 f,
47 "unresolved variable VarId({:#x}) [tag='{}', key={}]",
48 id, tag_char, key
49 )
50 }
51 RuntimeError::NullFunPtr => write!(f, "application of null function pointer"),
52 RuntimeError::BadFunPtrTag(tag) => {
53 write!(f, "application of non-closure (tag={})", tag)
54 }
55 RuntimeError::HeapOverflow => write!(f, "heap overflow (nursery exhausted after GC)"),
56 RuntimeError::StackOverflow => write!(f, "stack overflow (likely infinite list or unbounded recursion — use zipWithIndex/imap/enumFromTo instead of [0..])"),
57 RuntimeError::BlackHole => write!(f, "blackhole detected (infinite loop: thunk forced itself)"),
58 RuntimeError::BadThunkState(state) => write!(f, "thunk has invalid evaluation state: {}", state),
59 }
60 }
61}
62
thread_local! {
    /// Pointer to the stack-map registry used to interpret JIT frames.
    /// Raw pointer: the referent must stay alive between
    /// `set_stack_map_registry` and `clear_stack_map_registry`.
    static STACK_MAP_REGISTRY: RefCell<Option<*const StackMapRegistry>> = const { RefCell::new(None) };

    /// Roots found by the most recent collection (test introspection).
    static LAST_ROOTS: RefCell<Vec<StackRoot>> = const { RefCell::new(Vec::new()) };

    /// Optional test hook invoked with the root set after each GC.
    static HOOK: RefCell<Option<GcHook>> = const { RefCell::new(None) };

    /// Most recent runtime error, drained by `take_runtime_error`.
    static RUNTIME_ERROR: RefCell<Option<RuntimeError>> = const { RefCell::new(None) };

    /// Nursery bookkeeping for the currently running JIT'd program.
    pub(crate) static GC_STATE: RefCell<Option<GcState>> = const { RefCell::new(None) };

    /// Approximate call depth maintained by `debug_app_check` for
    /// stack-overflow detection; reset on each trampoline bounce.
    static CALL_DEPTH: Cell<u32> = const { Cell::new(0) };

    /// Accumulated diagnostic messages, drained by `drain_diagnostics`.
    static DIAGNOSTICS: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };

    /// Human-readable description of what is currently executing.
    static EXEC_CONTEXT: RefCell<String> = const { RefCell::new(String::new()) };
    /// Fixed-size copy of the first 128 bytes of `EXEC_CONTEXT`, readable
    /// without allocation (e.g. from a signal handler).
    pub(crate) static SIGNAL_SAFE_CTX: Cell<[u8; 128]> = const { Cell::new([0u8; 128]) };
    /// Number of valid bytes in `SIGNAL_SAFE_CTX`.
    pub(crate) static SIGNAL_SAFE_CTX_LEN: Cell<usize> = const { Cell::new(0) };
}
90
91pub fn set_exec_context(ctx: &str) {
94 EXEC_CONTEXT.with(|c| {
95 let mut s = c.borrow_mut();
96 s.clear();
97 s.push_str(ctx);
98 });
99 SIGNAL_SAFE_CTX.with(|c| {
100 let mut buf = [0u8; 128];
101 let len = ctx.len().min(128);
102 buf[..len].copy_from_slice(&ctx.as_bytes()[..len]);
103 c.set(buf);
104 });
105 SIGNAL_SAFE_CTX_LEN.with(|c| c.set(ctx.len().min(128)));
106}
107
108pub fn get_exec_context() -> String {
110 EXEC_CONTEXT.with(|c| c.borrow().clone())
111}
112
113pub fn push_diagnostic(msg: String) {
115 DIAGNOSTICS.with(|d| d.borrow_mut().push(msg));
116}
117
118pub fn drain_diagnostics() -> Vec<String> {
120 DIAGNOSTICS.with(|d| d.borrow_mut().drain(..).collect())
121}
122
/// Per-thread record of the active nursery (semispace) for the running program.
pub(crate) struct GcState {
    /// Start of the currently active allocation region.
    pub active_start: *mut u8,
    /// Size of the active region in bytes.
    pub active_size: usize,
    /// Owns the to-space buffer after a collection so it is not freed while
    /// JIT'd code still allocates into it; `None` before the first GC.
    pub active_buffer: Option<Vec<u8>>,
}

// SAFETY: GcState holds only a raw pointer plus an owned buffer, and in this
// file it is only ever accessed through a thread-local.
// NOTE(review): the Send impl implies some cross-thread handoff is expected —
// confirm no two threads use the same GcState concurrently.
unsafe impl Send for GcState {}
132
133pub fn set_gc_state(start: *mut u8, size: usize) {
135 GC_STATE.with(|cell| {
136 *cell.borrow_mut() = Some(GcState {
137 active_start: start,
138 active_size: size,
139 active_buffer: None,
140 });
141 });
142}
143
144pub fn clear_gc_state() {
146 GC_STATE.with(|cell| {
147 cell.borrow_mut().take();
148 });
149}
150
/// GC entry point called directly from JIT'd code when the nursery is full.
///
/// Captures the current frame pointer with inline asm and hands it to
/// `perform_gc`, which walks the JIT frames for roots. `#[inline(never)]`
/// keeps this function as a distinct frame on the stack.
#[inline(never)]
#[allow(clippy::not_unsafe_ptr_arg_deref)]
pub extern "C" fn gc_trigger(vmctx: *mut VMContext) {
    // Touch a stack local through black_box — presumably to force the
    // compiler to materialize a real frame here (TODO confirm intent).
    let mut _dummy = [0u64; 2];
    std::hint::black_box(&mut _dummy);

    // Test instrumentation: count calls and remember the vmctx we saw.
    GC_TRIGGER_CALL_COUNT.fetch_add(1, Ordering::SeqCst);
    GC_TRIGGER_LAST_VMCTX.store(vmctx as usize, Ordering::SeqCst);

    #[cfg(target_arch = "x86_64")]
    {
        let fp: usize;
        // SAFETY: reading rbp has no side effects; `nomem, nostack` hold.
        unsafe {
            std::arch::asm!("mov {}, rbp", out(reg) fp, options(nomem, nostack));
        }
        perform_gc(fp, vmctx);
    }

    #[cfg(target_arch = "aarch64")]
    {
        let fp: usize;
        // SAFETY: reading x29 (the AArch64 frame pointer) has no side effects.
        unsafe {
            std::arch::asm!("mov {}, x29", out(reg) fp, options(nomem, nostack));
        }
        perform_gc(fp, vmctx);
    }
}
189
/// Collects the nursery: walks JIT frames starting at `fp` for roots,
/// Cheney-copies live objects into a fresh to-space, and updates `vmctx`'s
/// bump-allocation window. Does nothing if no stack-map registry is installed.
#[inline(never)]
fn perform_gc(fp: usize, vmctx: *mut VMContext) {
    STACK_MAP_REGISTRY.with(|reg_cell| {
        if let Some(registry_ptr) = *reg_cell.borrow() {
            // SAFETY: installed by `set_stack_map_registry`; the caller is
            // responsible for keeping the registry alive while JIT code runs.
            let registry = unsafe { &*registry_ptr };
            // SAFETY: `fp` is the live frame pointer captured in `gc_trigger`.
            let roots = unsafe { frame_walker::walk_frames(fp, registry) };

            GC_STATE.with(|gc_cell| {
                let mut gc_state = gc_cell.borrow_mut();
                if let Some(state) = gc_state.as_mut() {
                    let from_start = state.active_start;
                    let from_size = state.active_size;
                    // SAFETY: from_start..from_start+from_size is the active
                    // nursery recorded via `set_gc_state`.
                    let from_end = unsafe { from_start.add(from_size) };

                    // Fresh to-space, same size as from-space.
                    let mut tospace = vec![0u8; from_size];

                    // Each root is the address of a stack slot holding a heap
                    // pointer; the copier rewrites the slots in place.
                    let root_slots: Vec<*mut *mut u8> = roots
                        .iter()
                        .map(|r| r.stack_slot_addr as *mut *mut u8)
                        .collect();

                    // SAFETY: root slots point into the live frames walked
                    // above; the from-space bounds delimit the old nursery.
                    let result = unsafe {
                        tidepool_heap::gc::raw::cheney_copy(
                            &root_slots,
                            from_start as *const u8,
                            from_end as *const u8,
                            &mut tospace,
                        )
                    };

                    // Keep the new buffer alive inside the state so JIT'd code
                    // can keep allocating from it; the previous buffer (if
                    // any) is dropped here.
                    let to_start = tospace.as_mut_ptr();
                    state.active_start = to_start;
                    state.active_buffer = Some(tospace);
                    // SAFETY: vmctx is the live context passed to gc_trigger;
                    // the bump pointer resumes right after the copied bytes.
                    unsafe {
                        (*vmctx).alloc_ptr = to_start.add(result.bytes_copied);
                        (*vmctx).alloc_limit = to_start.add(from_size) as *const u8;
                    }
                }
            });

            // Let tests observe the root set before it is stashed.
            HOOK.with(|hook_cell| {
                if let Some(hook) = *hook_cell.borrow() {
                    hook(&roots);
                }
            });

            LAST_ROOTS.with(|roots_cell| {
                *roots_cell.borrow_mut() = roots;
            });
        }
    });
}
259
260pub fn set_gc_test_hook(hook: GcHook) {
262 HOOK.with(|hook_cell| {
263 *hook_cell.borrow_mut() = Some(hook);
264 });
265}
266
267pub fn clear_gc_test_hook() {
269 HOOK.with(|hook_cell| {
270 *hook_cell.borrow_mut() = None;
271 });
272}
273
274pub fn set_stack_map_registry(registry: &StackMapRegistry) {
280 STACK_MAP_REGISTRY.with(|reg_cell| {
281 *reg_cell.borrow_mut() = Some(registry as *const _);
282 });
283}
284
285pub fn clear_stack_map_registry() {
287 STACK_MAP_REGISTRY.with(|reg_cell| {
288 *reg_cell.borrow_mut() = None;
289 });
290}
291
292pub fn last_gc_roots() -> Vec<StackRoot> {
294 LAST_ROOTS.with(|roots_cell| roots_cell.borrow().clone())
295}
296
/// Forces `obj` to weak-head normal form: evaluates thunks and follows
/// indirections until a non-thunk heap object is reached, then returns it.
/// A null `obj` is returned unchanged.
///
/// Thunk state machine:
/// * UNEVALUATED -> mark BLACKHOLE, run the thunk's code, store the result as
///   an indirection, mark EVALUATED, continue with the result.
/// * BLACKHOLE   -> the thunk is forcing itself: infinite-loop trap.
/// * EVALUATED   -> follow the stored indirection.
#[allow(clippy::not_unsafe_ptr_arg_deref)]
pub extern "C" fn heap_force(vmctx: *mut VMContext, obj: *mut u8) -> *mut u8 {
    if obj.is_null() {
        return obj;
    }

    // SAFETY: non-null `obj` is assumed to point at a valid heap object with
    // the standard header layout — guaranteed by the JIT'd callers.
    unsafe {
        let mut current = obj;

        loop {
            let tag = heap_layout::read_tag(current);

            if tag == layout::TAG_THUNK {
                let state = *current.add(layout::THUNK_STATE_OFFSET as usize);
                match state {
                    layout::THUNK_UNEVALUATED => {
                        // Blackhole first so a re-entrant force is detected.
                        *current.add(layout::THUNK_STATE_OFFSET as usize) = layout::THUNK_BLACKHOLE;

                        let code_ptr =
                            *(current.add(layout::THUNK_CODE_PTR_OFFSET as usize) as *const usize);

                        if code_ptr == 0 {
                            RUNTIME_ERROR.with(|cell| {
                                *cell.borrow_mut() = Some(RuntimeError::NullFunPtr);
                            });
                            return error_poison_ptr();
                        }

                        // Run the thunk body (may allocate and trigger GC).
                        let f: extern "C" fn(*mut VMContext, *mut u8) -> *mut u8 =
                            std::mem::transmute(code_ptr);
                        let result = f(vmctx, current);

                        // A GC during evaluation may have moved the thunk;
                        // chase the forwarding pointer before updating it.
                        if heap_layout::read_tag(current) == layout::TAG_FORWARDED {
                            current = *(current.add(8) as *const *mut u8);
                        }

                        // Overwrite the thunk with an indirection to `result`
                        // so later forces take the EVALUATED fast path.
                        *(current.add(layout::THUNK_INDIRECTION_OFFSET as usize) as *mut *mut u8) =
                            result;

                        *current.add(layout::THUNK_STATE_OFFSET as usize) = layout::THUNK_EVALUATED;

                        current = result;
                        continue;
                    }
                    layout::THUNK_BLACKHOLE => {
                        return runtime_blackhole_trap(vmctx);
                    }
                    layout::THUNK_EVALUATED => {
                        let next = *(current.add(layout::THUNK_INDIRECTION_OFFSET as usize)
                            as *const *mut u8);
                        current = next;
                        continue;
                    }
                    other => return runtime_bad_thunk_state_trap(vmctx, other),
                }
            }

            // Not a thunk: already in WHNF.
            return current;
        }
    }
}
376
/// Drives the tail-call trampoline: repeatedly invokes the closure queued in
/// `vmctx.tail_callee` (with `vmctx.tail_arg`) until a bounce produces a
/// non-null result or no further tail call is queued. Returns the final
/// result, or null when the chain ends without one.
#[allow(clippy::not_unsafe_ptr_arg_deref)]
pub extern "C" fn trampoline_resolve(vmctx: *mut VMContext) -> *mut u8 {
    // SAFETY: vmctx is the live context; queued callees are closures whose
    // code pointer lives at CLOSURE_CODE_PTR_OFFSET — guaranteed by the JIT.
    unsafe {
        loop {
            let callee = (*vmctx).tail_callee;
            let arg = (*vmctx).tail_arg;

            // Clear the queue so a new tail call from this bounce is
            // distinguishable from the one we just consumed.
            (*vmctx).tail_callee = std::ptr::null_mut();
            (*vmctx).tail_arg = std::ptr::null_mut();

            if callee.is_null() {
                return std::ptr::null_mut();
            }

            // Each bounce unwinds the machine stack, so the logical depth
            // counter starts over too.
            reset_call_depth();

            let code_ptr = *(callee.add(layout::CLOSURE_CODE_PTR_OFFSET as usize) as *const usize);

            let func: unsafe extern "C" fn(*mut VMContext, *mut u8, *mut u8) -> *mut u8 =
                std::mem::transmute(code_ptr);
            let result = func(vmctx, callee, arg);

            if !result.is_null() {
                return result;
            }

            // Null result with nothing queued means the chain is exhausted.
            if (*vmctx).tail_callee.is_null() {
                return std::ptr::null_mut();
            }

        }
    }
}
427
/// Number of times `gc_trigger` has been entered (test instrumentation).
static GC_TRIGGER_CALL_COUNT: AtomicU64 = AtomicU64::new(0);
/// The `vmctx` pointer seen by the most recent `gc_trigger` call (test instrumentation).
static GC_TRIGGER_LAST_VMCTX: AtomicUsize = AtomicUsize::new(0);
432
433pub fn reset_test_counters() {
435 GC_TRIGGER_CALL_COUNT.store(0, Ordering::SeqCst);
436 GC_TRIGGER_LAST_VMCTX.store(0, Ordering::SeqCst);
437 LAST_ROOTS.with(|roots_cell| {
438 roots_cell.borrow_mut().clear();
439 });
440}
441
/// Number of `gc_trigger` invocations since the last `reset_test_counters`.
pub fn gc_trigger_call_count() -> u64 {
    GC_TRIGGER_CALL_COUNT.load(Ordering::SeqCst)
}

/// `vmctx` address recorded by the most recent `gc_trigger` invocation.
pub fn gc_trigger_last_vmctx() -> usize {
    GC_TRIGGER_LAST_VMCTX.load(Ordering::SeqCst)
}
451
452pub extern "C" fn unresolved_var_trap(var_id: u64) -> *mut u8 {
456 let tag_char = (var_id >> 56) as u8 as char;
457 let key = var_id & ((1u64 << 56) - 1);
458 let msg = format!(
459 "[JIT] Forced unresolved external variable: VarId({:#x}) [tag='{}', key={}]",
460 var_id, tag_char, key
461 );
462 eprintln!("{}", msg);
463 push_diagnostic(msg);
464 RUNTIME_ERROR.with(|cell| {
465 *cell.borrow_mut() = Some(RuntimeError::UnresolvedVar(var_id));
466 });
467 error_poison_ptr()
468}
469
470pub extern "C" fn runtime_error(kind: u64) -> *mut u8 {
477 let err_name = match kind {
478 0 => "DivisionByZero",
479 1 => "Overflow",
480 2 => "UserError",
481 3 => "Undefined",
482 4 => "TypeMetadata",
483 _ => "Unknown",
484 };
485 let msg = format!("[JIT] runtime_error called: kind={} ({})", kind, err_name);
486 eprintln!("{}", msg);
487 push_diagnostic(msg);
488 let err = match kind {
489 0 => RuntimeError::DivisionByZero,
490 1 => RuntimeError::Overflow,
491 2 => RuntimeError::UserError,
492 3 => RuntimeError::Undefined,
493 4 => RuntimeError::TypeMetadata,
494 _ => RuntimeError::UserError,
495 };
496 RUNTIME_ERROR.with(|cell| {
497 *cell.borrow_mut() = Some(err);
498 });
499 error_poison_ptr()
504}
505
506#[allow(clippy::not_unsafe_ptr_arg_deref)]
508pub extern "C" fn runtime_error_with_msg(kind: u64, msg_ptr: *const u8, msg_len: u64) -> *mut u8 {
509 let msg = if !msg_ptr.is_null() && msg_len > 0 {
510 let slice = unsafe { std::slice::from_raw_parts(msg_ptr, msg_len as usize) };
513 String::from_utf8_lossy(slice).to_string()
514 } else {
515 String::new()
516 };
517 let err_name = match kind {
518 0 => "DivisionByZero",
519 1 => "Overflow",
520 2 => "UserError",
521 3 => "Undefined",
522 4 => "TypeMetadata",
523 _ => "Unknown",
524 };
525 let diag = format!(
526 "[JIT] runtime_error called: kind={} ({}) msg={:?}",
527 kind, err_name, msg
528 );
529 eprintln!("{}", diag);
530 push_diagnostic(diag);
531 let err = match kind {
532 2 if !msg.is_empty() => RuntimeError::UserErrorMsg(msg),
533 0 => RuntimeError::DivisionByZero,
534 1 => RuntimeError::Overflow,
535 2 => RuntimeError::UserError,
536 3 => RuntimeError::Undefined,
537 4 => RuntimeError::TypeMetadata,
538 _ => RuntimeError::UserError,
539 };
540 RUNTIME_ERROR.with(|cell| {
541 *cell.borrow_mut() = Some(err);
542 });
543 error_poison_ptr()
544}
545
546pub extern "C" fn runtime_oom() -> *mut u8 {
547 RUNTIME_ERROR.with(|cell| {
548 *cell.borrow_mut() = Some(RuntimeError::HeapOverflow);
549 });
550 error_poison_ptr()
551}
552
553pub extern "C" fn runtime_blackhole_trap(_vmctx: *mut VMContext) -> *mut u8 {
555 let msg = "[JIT] BlackHole detected: infinite loop (thunk forcing itself)".to_string();
556 eprintln!("{}", msg);
557 push_diagnostic(msg);
558 RUNTIME_ERROR.with(|cell| {
559 *cell.borrow_mut() = Some(RuntimeError::BlackHole);
560 });
561 error_poison_ptr()
562}
563
564pub extern "C" fn runtime_bad_thunk_state_trap(_vmctx: *mut VMContext, state: u8) -> *mut u8 {
566 let msg = format!("[JIT] Invalid thunk state: {}", state);
567 eprintln!("{}", msg);
568 push_diagnostic(msg);
569 RUNTIME_ERROR.with(|cell| {
570 *cell.borrow_mut() = Some(RuntimeError::BadThunkState(state));
571 });
572 error_poison_ptr()
573}
574
/// Returns the process-wide "poison" pointer: a leaked closure whose code
/// simply returns the poison pointer again, so JIT'd code can keep applying
/// it harmlessly until the host checks `take_runtime_error`.
pub fn error_poison_ptr() -> *mut u8 {
    use std::sync::OnceLock;
    // One allocation per process, created lazily and never freed
    // (intentional leak: JIT'd code may hold it indefinitely).
    static POISON: OnceLock<usize> = OnceLock::new();
    let addr = *POISON.get_or_init(|| {
        // 24 bytes: header + code pointer + captured-count for an empty closure.
        let size = 24usize;
        let layout =
            std::alloc::Layout::from_size_align(size, 8).unwrap_or_else(|_| std::process::abort());
        let ptr = unsafe { std::alloc::alloc_zeroed(layout) };
        if ptr.is_null() {
            std::alloc::handle_alloc_error(layout);
        }
        // SAFETY: `ptr` is a fresh, zeroed, 8-aligned allocation of `size`
        // bytes; all offsets written below lie within it.
        unsafe {
            tidepool_heap::layout::write_header(
                ptr,
                tidepool_heap::layout::TAG_CLOSURE,
                size as u16,
            );
            *(ptr.add(tidepool_heap::layout::CLOSURE_CODE_PTR_OFFSET) as *mut usize) =
                poison_trampoline as *const () as usize;
            *(ptr.add(tidepool_heap::layout::CLOSURE_NUM_CAPTURED_OFFSET) as *mut u16) = 0;
        }
        ptr as usize
    });
    addr as *mut u8
}
612
/// Code pointer of the poison closure: applying poison yields poison, so a
/// poisoned computation keeps collapsing to itself instead of crashing.
unsafe extern "C" fn poison_trampoline(
    _vmctx: *mut VMContext,
    _closure: *mut u8,
    _arg: *mut u8,
) -> *mut u8 {
    error_poison_ptr()
}
625
/// Returns a per-kind "lazy poison" closure that raises `runtime_error(kind)`
/// only when applied. Kinds 0..=4 each get one closure, allocated on first
/// use and intentionally leaked; out-of-range kinds clamp to 4.
pub fn error_poison_ptr_lazy(kind: u64) -> *mut u8 {
    use std::sync::OnceLock;
    // One closure per kind, created once for the whole process.
    static LAZY_POISONS: OnceLock<[usize; 5]> = OnceLock::new();
    let ptrs = LAZY_POISONS.get_or_init(|| {
        let mut arr = [0usize; 5];
        for k in 0..5u64 {
            // 32 bytes: header + code ptr + captured-count + one captured
            // slot holding the error kind.
            let size = 32usize;
            let lo = std::alloc::Layout::from_size_align(size, 8)
                .unwrap_or_else(|_| std::process::abort());
            let ptr = unsafe { std::alloc::alloc_zeroed(lo) };
            if ptr.is_null() {
                std::alloc::handle_alloc_error(lo);
            }
            // SAFETY: fresh zeroed 8-aligned allocation of `size` bytes; all
            // offsets written below lie within it.
            unsafe {
                tidepool_heap::layout::write_header(
                    ptr,
                    tidepool_heap::layout::TAG_CLOSURE,
                    size as u16,
                );
                *(ptr.add(tidepool_heap::layout::CLOSURE_CODE_PTR_OFFSET) as *mut usize) =
                    poison_trampoline_lazy as *const () as usize;
                *(ptr.add(tidepool_heap::layout::CLOSURE_NUM_CAPTURED_OFFSET) as *mut u16) = 1;
                *(ptr.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET) as *mut u64) = k;
            }
            arr[k as usize] = ptr as usize;
        }
        arr
    });
    ptrs[kind.min(4) as usize] as *mut u8
}
667
/// Code pointer of the lazy-poison closures: when applied, raises the error
/// kind captured in the closure. If the argument is a Lit whose lit_tag is 5
/// (NOTE(review): appears to be the string literal tag — confirm against the
/// Lit tag constants), its payload becomes the error message.
unsafe extern "C" fn poison_trampoline_lazy(
    _vmctx: *mut VMContext,
    closure: *mut u8,
    arg: *mut u8,
) -> *mut u8 {
    let kind = *(closure.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET) as *const u64);

    if !arg.is_null() && tidepool_heap::layout::read_tag(arg) == tidepool_heap::layout::TAG_LIT {
        let lit_tag = *arg.add(tidepool_heap::layout::LIT_TAG_OFFSET);
        if lit_tag == 5 {
            // String payload layout: u64 length followed by the bytes.
            let raw_ptr = *(arg.add(tidepool_heap::layout::LIT_VALUE_OFFSET) as *const *const u8);
            if !raw_ptr.is_null() {
                let len = *(raw_ptr as *const u64);
                let bytes_ptr = raw_ptr.add(8);
                return runtime_error_with_msg(kind, bytes_ptr, len);
            }
        }
    }

    runtime_error(kind)
}
696
/// Builds a lazy poison closure that, when applied, raises `kind` with the
/// given message. Both the closure and the copied message are intentionally
/// leaked — JIT'd code may hold and apply them indefinitely.
pub fn error_poison_ptr_lazy_msg(kind: u64, msg: &[u8]) -> *mut u8 {
    // Copy the message to a stable buffer, then leak it so the raw pointer
    // stored in the closure stays valid.
    let msg_bytes = msg.to_vec().into_boxed_slice();
    let msg_ptr = msg_bytes.as_ptr();
    let msg_len = msg_bytes.len();
    std::mem::forget(msg_bytes);

    // Closure with three captured slots: kind, message pointer, message length.
    let size = tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET + 3 * 8;
    let layout = std::alloc::Layout::from_size_align(size, 8).expect("constant size/align");
    let ptr = unsafe { std::alloc::alloc_zeroed(layout) };
    if ptr.is_null() {
        std::alloc::handle_alloc_error(layout);
    }
    // SAFETY: fresh zeroed 8-aligned allocation of `size` bytes; all offsets
    // written below lie within it.
    unsafe {
        tidepool_heap::layout::write_header(ptr, tidepool_heap::layout::TAG_CLOSURE, size as u16);
        *(ptr.add(tidepool_heap::layout::CLOSURE_CODE_PTR_OFFSET) as *mut usize) =
            poison_trampoline_lazy_msg as *const () as usize;
        *(ptr.add(tidepool_heap::layout::CLOSURE_NUM_CAPTURED_OFFSET) as *mut u16) = 3;
        *(ptr.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET) as *mut u64) = kind;
        *(ptr.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET + 8) as *mut usize) =
            msg_ptr as usize;
        *(ptr.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET + 16) as *mut u64) =
            msg_len as u64;
    }
    ptr
}
730
/// Code pointer of message-carrying lazy poison closures: unpacks the
/// captured (kind, msg_ptr, msg_len) triple and raises the error.
unsafe extern "C" fn poison_trampoline_lazy_msg(
    _vmctx: *mut VMContext,
    closure: *mut u8,
    _arg: *mut u8,
) -> *mut u8 {
    let kind = *(closure.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET) as *const u64);
    let msg_ptr =
        *(closure.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET + 8) as *const *const u8);
    let msg_len = *(closure.add(tidepool_heap::layout::CLOSURE_CAPTURED_OFFSET + 16) as *const u64);
    runtime_error_with_msg(kind, msg_ptr, msg_len)
}
744
745pub fn take_runtime_error() -> Option<RuntimeError> {
747 RUNTIME_ERROR.with(|cell| cell.borrow_mut().take())
748}
749
750pub fn reset_call_depth() {
752 CALL_DEPTH.with(|c| c.set(0));
753}
754
755fn check_ptr_invalid(ptr: *const u8, fn_name: &str) -> bool {
757 if (ptr as i64) < MIN_VALID_ADDR as i64 {
758 let msg = format!("[BUG] {}: bad pointer {:#x}", fn_name, ptr as u64);
759 eprintln!("{}", msg);
760 push_diagnostic(msg);
761 RUNTIME_ERROR.with(|cell| {
762 *cell.borrow_mut() = Some(RuntimeError::Undefined);
763 });
764 true
765 } else {
766 false
767 }
768}
769
/// Applications deeper than this are treated as stack overflow.
const MAX_CALL_DEPTH: u32 = 50_000;

/// Pre-application check called before every JIT'd closure application.
///
/// Returns null when the application may proceed, or the poison pointer when
/// it must not: an error is already pending, the depth limit was hit, or
/// `fun_ptr` is null / not a closure.
///
/// # Safety
/// `fun_ptr`, when non-null, must point at a heap object with a readable tag
/// byte; for TAG_CON objects the con header fields must also be readable.
pub unsafe extern "C" fn debug_app_check(fun_ptr: *const u8) -> *mut u8 {
    let has_error = RUNTIME_ERROR.with(|cell| cell.borrow().is_some());

    if !has_error {
        // Count applications as a cheap stack-depth proxy; the trampoline
        // resets the counter on every bounce (see `reset_call_depth`).
        let depth = CALL_DEPTH.with(|c| {
            let d = c.get() + 1;
            c.set(d);
            d
        });
        if depth > MAX_CALL_DEPTH {
            RUNTIME_ERROR.with(|cell| {
                *cell.borrow_mut() = Some(RuntimeError::StackOverflow);
            });
            return error_poison_ptr();
        }
    }
    if fun_ptr.is_null() {
        // With an error already pending this is just poison propagating, so
        // skip the logging and keep unwinding.
        if has_error {
            return error_poison_ptr(); }
        let msg = "[JIT] App: fun_ptr is NULL — unresolved binding".to_string();
        eprintln!("{}", msg);
        push_diagnostic(msg);
        RUNTIME_ERROR.with(|cell| {
            *cell.borrow_mut() = Some(RuntimeError::NullFunPtr);
        });
        return error_poison_ptr();
    }
    // SAFETY: non-null fun_ptr points at a heap object; first byte is the tag.
    let tag = unsafe { *fun_ptr };
    if tag != tidepool_heap::layout::TAG_CLOSURE {
        use std::io::Write;
        // Lock stderr once so the multi-line report stays contiguous.
        let mut stderr = std::io::stderr().lock();
        if has_error {
            return error_poison_ptr(); }
        let tag_name = match tag {
            0 => "Closure",
            1 => "Thunk",
            2 => "Con",
            3 => "Lit",
            _ => "UNKNOWN",
        };
        let msg = format!(
            "[JIT] App: fun_ptr={:p} has tag {} ({}) — expected Closure!",
            fun_ptr, tag, tag_name
        );
        let _ = writeln!(stderr, "{}", msg);
        push_diagnostic(msg);
        if tag == tidepool_heap::layout::TAG_CON {
            // Extra detail for the common "applied a constructor" mistake.
            let con_tag = unsafe { *(fun_ptr.add(layout::CON_TAG_OFFSET as usize) as *const u64) };
            let num_fields =
                unsafe { *(fun_ptr.add(layout::CON_NUM_FIELDS_OFFSET as usize) as *const u16) };
            let msg2 = format!("[JIT] Con tag={}, num_fields={}", con_tag, num_fields);
            let _ = writeln!(stderr, "{}", msg2);
            push_diagnostic(msg2);
        }
        let _ = stderr.flush();
        RUNTIME_ERROR.with(|cell| {
            *cell.borrow_mut() = Some(RuntimeError::BadFunPtrTag(tag));
        });
        return error_poison_ptr();
    }
    // Tag is Closure: application may proceed.
    std::ptr::null_mut() }
860
861pub extern "C" fn runtime_new_byte_array(size: i64) -> i64 {
869 if size < 0 {
870 RUNTIME_ERROR.with(|cell| {
871 *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
872 "negative size in byte array allocation".to_string(),
873 ));
874 });
875 return error_poison_ptr() as i64;
876 }
877 let total = 8usize.saturating_add(size as usize);
878 let layout =
879 std::alloc::Layout::from_size_align(total, 8).unwrap_or_else(|_| std::process::abort());
880 let ptr = unsafe { std::alloc::alloc_zeroed(layout) };
882 if ptr.is_null() {
883 std::alloc::handle_alloc_error(layout);
884 }
885 unsafe {
887 *(ptr as *mut u64) = size as u64;
888 }
889 ptr as i64
890}
891
892pub extern "C" fn runtime_copy_addr_to_byte_array(src: i64, dest_ba: i64, dest_off: i64, len: i64) {
894 if check_ptr_invalid(src as *const u8, "runtime_copy_addr_to_byte_array")
895 || check_ptr_invalid(dest_ba as *const u8, "runtime_copy_addr_to_byte_array")
896 {
897 return;
898 }
899 if dest_off < 0 || len < 0 {
900 return;
901 }
902 let dest_size = unsafe { *(dest_ba as *const u64) } as usize;
905 if (dest_off as usize).saturating_add(len as usize) > dest_size {
906 return;
907 }
908 let src_ptr = src as *const u8;
909 let dest_ptr = unsafe { (dest_ba as *mut u8).add(8 + dest_off as usize) };
911 unsafe {
914 std::ptr::copy_nonoverlapping(src_ptr, dest_ptr, len as usize);
915 }
916}
917
918pub extern "C" fn runtime_set_byte_array(ba: i64, off: i64, len: i64, val: i64) {
920 if check_ptr_invalid(ba as *const u8, "runtime_set_byte_array") {
921 return;
922 }
923 if off < 0 || len < 0 {
924 return;
925 }
926 let ba_size = unsafe { *(ba as *const u64) } as usize;
927 if (off as usize).saturating_add(len as usize) > ba_size {
928 return;
929 }
930 let ptr = unsafe { (ba as *mut u8).add(8 + off as usize) };
932 unsafe {
934 std::ptr::write_bytes(ptr, val as u8, len as usize);
935 }
936}
937
938pub extern "C" fn runtime_shrink_byte_array(ba: i64, new_size: i64) {
940 if new_size < 0 || (ba as u64) < MIN_VALID_ADDR {
941 return;
942 }
943 let old_size = unsafe { *(ba as *const u64) } as i64;
944 if new_size > old_size {
945 return; }
947 unsafe {
950 *(ba as *mut u64) = new_size as u64;
951 }
952}
953
/// Reallocates a byte array to `new_size`, copying `min(old, new)` payload
/// bytes and zero-filling any growth, then frees the old allocation and
/// returns the new handle. Negative sizes or bad handles yield the poison
/// pointer (only the negative-size path records a `RuntimeError`).
pub extern "C" fn runtime_resize_byte_array(ba: i64, new_size: i64) -> i64 {
    if new_size < 0 {
        RUNTIME_ERROR.with(|cell| {
            *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
                "negative size in byte array allocation".to_string(),
            ));
        });
        return error_poison_ptr() as i64;
    }
    if (ba as u64) < MIN_VALID_ADDR {
        return error_poison_ptr() as i64;
    }
    let old_ptr = ba as *mut u8;
    // SAFETY: a valid byte-array handle starts with its u64 length header.
    let old_size = unsafe { *(old_ptr as *const u64) } as usize;
    let new_size = new_size as usize;

    let new_total = 8usize.saturating_add(new_size);
    let new_layout =
        std::alloc::Layout::from_size_align(new_total, 8).unwrap_or_else(|_| std::process::abort());
    let new_ptr = unsafe { std::alloc::alloc_zeroed(new_layout) };
    if new_ptr.is_null() {
        std::alloc::handle_alloc_error(new_layout);
    }

    // Copy the surviving payload (both regions start after the 8-byte header).
    let copy_len = old_size.min(new_size);
    // SAFETY: both buffers hold at least `copy_len` payload bytes.
    unsafe {
        std::ptr::copy_nonoverlapping(old_ptr.add(8), new_ptr.add(8), copy_len);
    }

    unsafe {
        *(new_ptr as *mut u64) = new_size as u64;
    }

    // Free the old allocation with the same layout it was created with
    // (header + payload, 8-aligned — see `runtime_new_byte_array`).
    let old_total = 8 + old_size;
    let old_layout =
        std::alloc::Layout::from_size_align(old_total, 8).unwrap_or_else(|_| std::process::abort());
    // SAFETY: `old_ptr` was produced by this allocator with this layout.
    unsafe {
        std::alloc::dealloc(old_ptr, old_layout);
    }

    new_ptr as i64
}
1006
1007pub extern "C" fn runtime_copy_byte_array(
1010 src: i64,
1011 src_off: i64,
1012 dest: i64,
1013 dest_off: i64,
1014 len: i64,
1015) {
1016 if check_ptr_invalid(src as *const u8, "runtime_copy_byte_array")
1017 || check_ptr_invalid(dest as *const u8, "runtime_copy_byte_array")
1018 {
1019 return;
1020 }
1021 let src_size = unsafe { *(src as *const u64) } as usize;
1023 let dest_size = unsafe { *(dest as *const u64) } as usize;
1024 if src_off < 0 || dest_off < 0 || len < 0 {
1025 return; }
1027 let src_off = src_off as usize;
1028 let dest_off = dest_off as usize;
1029 let len = len as usize;
1030 if src_off.saturating_add(len) > src_size || dest_off.saturating_add(len) > dest_size {
1031 return; }
1033
1034 let src_ptr = unsafe { (src as *const u8).add(8 + src_off) };
1037 let dest_ptr = unsafe { (dest as *mut u8).add(8 + dest_off) };
1038 unsafe {
1041 std::ptr::copy(src_ptr, dest_ptr, len);
1042 }
1043}
1044
1045pub extern "C" fn runtime_compare_byte_arrays(
1047 a: i64,
1048 a_off: i64,
1049 b: i64,
1050 b_off: i64,
1051 len: i64,
1052) -> i64 {
1053 if check_ptr_invalid(a as *const u8, "runtime_compare_byte_arrays")
1054 || check_ptr_invalid(b as *const u8, "runtime_compare_byte_arrays")
1055 {
1056 return 0;
1057 }
1058 if a_off < 0 || b_off < 0 || len < 0 {
1059 return 0;
1060 }
1061 let a_size = unsafe { *(a as *const u64) } as usize;
1062 let b_size = unsafe { *(b as *const u64) } as usize;
1063 if (a_off as usize).saturating_add(len as usize) > a_size
1064 || (b_off as usize).saturating_add(len as usize) > b_size
1065 {
1066 return 0;
1067 }
1068
1069 let a_ptr = unsafe { (a as *const u8).add(8 + a_off as usize) };
1072 let b_ptr = unsafe { (b as *const u8).add(8 + b_off as usize) };
1073 let a_slice = unsafe { std::slice::from_raw_parts(a_ptr, len as usize) };
1074 let b_slice = unsafe { std::slice::from_raw_parts(b_ptr, len as usize) };
1075 match a_slice.cmp(b_slice) {
1076 std::cmp::Ordering::Less => -1,
1077 std::cmp::Ordering::Equal => 0,
1078 std::cmp::Ordering::Greater => 1,
1079 }
1080}
1081
1082pub extern "C" fn runtime_new_boxed_array(len: i64, init: i64) -> i64 {
1090 if len < 0 {
1091 RUNTIME_ERROR.with(|cell| {
1092 *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
1093 "negative length in array allocation".to_string(),
1094 ));
1095 });
1096 return error_poison_ptr() as i64;
1097 }
1098 let n = len as usize;
1099 let slot_bytes = match n.checked_mul(8) {
1100 Some(v) => v,
1101 None => {
1102 RUNTIME_ERROR.with(|cell| {
1103 *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
1104 "array size overflow".to_string(),
1105 ));
1106 });
1107 return error_poison_ptr() as i64;
1108 }
1109 };
1110 let total = match 8usize.checked_add(slot_bytes) {
1111 Some(v) => v,
1112 None => {
1113 RUNTIME_ERROR.with(|cell| {
1114 *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
1115 "array size overflow".to_string(),
1116 ));
1117 });
1118 return error_poison_ptr() as i64;
1119 }
1120 };
1121 let layout =
1122 std::alloc::Layout::from_size_align(total, 8).unwrap_or_else(|_| std::process::abort());
1123 let ptr = unsafe { std::alloc::alloc(layout) };
1125 if ptr.is_null() {
1126 std::alloc::handle_alloc_error(layout);
1127 }
1128 unsafe {
1131 let slots = ptr.add(8) as *mut i64;
1132 for i in 0..n {
1133 *slots.add(i) = init;
1134 }
1135 *(ptr as *mut u64) = n as u64;
1138 }
1139 ptr as i64
1140}
1141
/// Allocates a new boxed array holding `len` slots copied from `src`
/// starting at slot `off`.
///
/// Returns the poison pointer on a bad handle, negative length, size
/// overflow, or an out-of-range source window; note that only the
/// negative-length and overflow paths also record a `RuntimeError`.
pub extern "C" fn runtime_clone_boxed_array(src: i64, off: i64, len: i64) -> i64 {
    if (src as u64) < MIN_VALID_ADDR {
        return error_poison_ptr() as i64;
    }
    if len < 0 {
        RUNTIME_ERROR.with(|cell| {
            *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
                "negative length in array allocation".to_string(),
            ));
        });
        return error_poison_ptr() as i64;
    }
    let n = len as usize;
    // 8 bytes per slot plus the 8-byte header, both checked for overflow.
    let slot_bytes = match n.checked_mul(8) {
        Some(v) => v,
        None => {
            RUNTIME_ERROR.with(|cell| {
                *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
                    "array size overflow".to_string(),
                ));
            });
            return error_poison_ptr() as i64;
        }
    };
    let total = match 8usize.checked_add(slot_bytes) {
        Some(v) => v,
        None => {
            RUNTIME_ERROR.with(|cell| {
                *cell.borrow_mut() = Some(RuntimeError::UserErrorMsg(
                    "array size overflow".to_string(),
                ));
            });
            return error_poison_ptr() as i64;
        }
    };

    // SAFETY: a valid boxed-array handle starts with its u64 length header.
    let src_n = unsafe { *(src as *const u64) } as usize;
    if off < 0 || (off as usize).saturating_add(n) > src_n {
        return error_poison_ptr() as i64; }

    let layout =
        std::alloc::Layout::from_size_align(total, 8).unwrap_or_else(|_| std::process::abort());
    let ptr = unsafe { std::alloc::alloc(layout) };
    if ptr.is_null() {
        std::alloc::handle_alloc_error(layout);
    }
    // SAFETY: the source window was bounds-checked above and `ptr` holds
    // `total` = 8 + 8*n bytes.
    unsafe {
        *(ptr as *mut u64) = n as u64;
        let src_slots = (src as *const u8).add(8 + 8 * off as usize);
        let dst_slots = ptr.add(8);
        std::ptr::copy_nonoverlapping(src_slots, dst_slots, 8 * n);
    }
    ptr as i64
}
1202
1203pub extern "C" fn runtime_copy_boxed_array(
1205 src: i64,
1206 src_off: i64,
1207 dest: i64,
1208 dest_off: i64,
1209 len: i64,
1210) {
1211 if (src as u64) < MIN_VALID_ADDR || (dest as u64) < MIN_VALID_ADDR {
1212 return;
1213 }
1214 if src_off < 0 || dest_off < 0 || len < 0 {
1215 return;
1216 }
1217 let src_n = unsafe { *(src as *const u64) } as usize;
1218 let dest_n = unsafe { *(dest as *const u64) } as usize;
1219 let src_off = src_off as usize;
1220 let dest_off = dest_off as usize;
1221 let len = len as usize;
1222 if src_off.saturating_add(len) > src_n || dest_off.saturating_add(len) > dest_n {
1223 return; }
1225
1226 let src_ptr = unsafe { (src as *const u8).add(8 + 8 * src_off) };
1230 let dest_ptr = unsafe { (dest as *mut u8).add(8 + 8 * dest_off) };
1231 unsafe {
1232 std::ptr::copy(src_ptr, dest_ptr, 8 * len);
1233 }
1234}
1235
1236pub extern "C" fn runtime_shrink_boxed_array(arr: i64, new_len: i64) {
1238 if new_len < 0 || (arr as u64) < MIN_VALID_ADDR {
1239 return;
1240 }
1241 let old_len = unsafe { *(arr as *const u64) } as i64;
1242 if new_len > old_len {
1243 return; }
1245 unsafe {
1248 *(arr as *mut u64) = new_len as u64;
1249 }
1250}
1251
/// Compare-and-swap on one slot of a boxed array: if slot `idx` holds
/// `expected`, stores `new`; returns the previous slot value either way.
/// Bad handles or out-of-range indices return the poison pointer value.
///
/// NOTE(review): the read-compare-write uses plain loads/stores, so this is
/// only "CAS" in the single-threaded sense — confirm callers never race.
pub extern "C" fn runtime_cas_boxed_array(arr: i64, idx: i64, expected: i64, new: i64) -> i64 {
    if (arr as u64) < MIN_VALID_ADDR || idx < 0 {
        return error_poison_ptr() as i64;
    }
    // SAFETY: a valid boxed-array handle starts with its u64 length header.
    let n = unsafe { *(arr as *const u64) } as usize;
    if idx as usize >= n {
        return error_poison_ptr() as i64;
    }
    // Slot `idx` lives after the 8-byte header, 8 bytes per slot.
    let slot = unsafe { (arr as *mut u8).add(8 + 8 * idx as usize) as *mut i64 };
    let old = unsafe { *slot };
    if old == expected {
        unsafe { *slot = new };
    }
    old
}
1271
/// Returns the decoded integer mantissa of the double whose bits are `bits`.
pub extern "C" fn runtime_decode_double_mantissa(bits: i64) -> i64 {
    decode_double_int64(f64::from_bits(bits as u64)).0
}

/// Returns the decoded base-2 exponent of the double whose bits are `bits`.
pub extern "C" fn runtime_decode_double_exponent(bits: i64) -> i64 {
    decode_double_int64(f64::from_bits(bits as u64)).1
}

/// Decodes `d` into `(mantissa, exponent)` with `d == mantissa * 2^exponent`
/// and the mantissa fully reduced (odd). Zero and NaN decode to `(0, 0)`;
/// infinities to `(±1, 0)`.
fn decode_double_int64(d: f64) -> (i64, i64) {
    if d == 0.0 || d.is_nan() {
        return (0, 0);
    }
    if d.is_infinite() {
        let sign = if d > 0.0 { 1 } else { -1 };
        return (sign, 0);
    }
    let bits = d.to_bits();
    let negative = bits >> 63 != 0;
    let biased_exp = ((bits >> 52) & 0x7ff) as i32;
    let fraction = (bits & 0x000f_ffff_ffff_ffff) as i64;
    // Subnormals have no implicit leading bit; normals get bit 52 set.
    let (mut man, mut exp) = if biased_exp == 0 {
        (fraction, 1 - 1023 - 52)
    } else {
        (fraction | (1i64 << 52), biased_exp - 1023 - 52)
    };
    if negative {
        man = -man;
    }
    if man == 0 {
        return (0, 0);
    }
    // Strip trailing zero bits so the mantissa is odd.
    let shift = man.unsigned_abs().trailing_zeros();
    man >>= shift;
    exp += shift as i32;
    (man, exp as i64)
}
1314
1315pub extern "C" fn runtime_strlen(addr: i64) -> i64 {
1317 if check_ptr_invalid(addr as *const u8, "runtime_strlen") {
1318 return 0;
1319 }
1320 let ptr = addr as *const u8;
1321 let mut len = 0i64;
1322 unsafe {
1325 while *ptr.add(len as usize) != 0 {
1326 len += 1;
1327 }
1328 }
1329 len
1330}
1331
/// Advances through UTF-8 text at `addr + off` (`len` bytes), counting `cnt`
/// characters. Returns the byte offset just past the `cnt`-th character, or
/// `-(characters_found)` when the buffer ends first. Character widths come
/// from the lead byte; the input is assumed to start on a char boundary.
pub extern "C" fn runtime_text_measure_off(addr: i64, off: i64, len: i64, cnt: i64) -> i64 {
    if len <= 0 || cnt <= 0 {
        return 0;
    }
    if check_ptr_invalid(addr as *const u8, "runtime_text_measure_off") {
        return 0;
    }
    let ptr = (addr + off) as *const u8;
    let len = len as usize;
    let cnt = cnt as usize;
    let mut byte_pos = 0usize;
    let mut chars_found = 0usize;
    while chars_found < cnt && byte_pos < len {
        // SAFETY: byte_pos < len and the caller guarantees `len` readable
        // bytes at `addr + off`.
        let b = unsafe { *ptr.add(byte_pos) };
        // UTF-8 sequence length from the lead byte.
        let char_len = if b < 0x80 {
            1
        } else if b < 0xE0 {
            2
        } else if b < 0xF0 {
            3
        } else {
            4
        };
        byte_pos += char_len;
        chars_found += 1;
    }
    if chars_found >= cnt {
        byte_pos as i64
    } else {
        // Ran out of bytes: report how many characters were found, negated.
        -(chars_found as i64)
    }
}
1375
1376pub extern "C" fn runtime_text_memchr(addr: i64, off: i64, len: i64, needle: i64) -> i64 {
1378 if len <= 0 {
1379 return -1;
1380 }
1381 if check_ptr_invalid(addr as *const u8, "runtime_text_memchr") {
1382 return -1;
1383 }
1384 let ptr = (addr + off) as *const u8;
1385 let slice = unsafe { std::slice::from_raw_parts(ptr, len as usize) };
1388 match slice.iter().position(|&b| b == needle as u8) {
1389 Some(pos) => pos as i64,
1390 None => -1,
1391 }
1392}
1393
1394pub extern "C" fn runtime_text_reverse(dest: i64, src: i64, off: i64, len: i64) {
1398 if len <= 0 {
1399 return;
1400 }
1401 if check_ptr_invalid(dest as *const u8, "runtime_text_reverse")
1402 || check_ptr_invalid(src as *const u8, "runtime_text_reverse")
1403 {
1404 return;
1405 }
1406 let src_ptr = (src + off) as *const u8;
1407 let src_slice = unsafe { std::slice::from_raw_parts(src_ptr, len as usize) };
1409 let dest_ptr = dest as *mut u8;
1410 let mut read_pos = 0usize;
1412 let mut write_pos = len as usize;
1413 while read_pos < len as usize {
1414 let b = src_slice[read_pos];
1415 let char_len = if b < 0x80 {
1416 1
1417 } else if b < 0xE0 {
1418 2
1419 } else if b < 0xF0 {
1420 3
1421 } else {
1422 4
1423 };
1424 write_pos -= char_len;
1425 unsafe {
1428 std::ptr::copy_nonoverlapping(
1429 src_slice.as_ptr().add(read_pos),
1430 dest_ptr.add(write_pos),
1431 char_len,
1432 );
1433 }
1434 read_pos += char_len;
1435 }
1436}
1437
/// Renders the double whose raw bit pattern is `bits` using Haskell `show`
/// semantics (via `haskell_show_double`) and returns a freshly allocated,
/// NUL-terminated C string.
///
/// Ownership of the buffer transfers to the caller: it comes from
/// `CString::into_raw` and is never freed here. If the rendered text
/// contained an interior NUL (should be impossible for numeric output) the
/// thread-local runtime error is set and the poison pointer is returned.
pub extern "C" fn runtime_show_double_addr(bits: i64) -> i64 {
    let d = f64::from_bits(bits as u64);
    let s = haskell_show_double(d);
    let c_str = match std::ffi::CString::new(s) {
        Ok(c) => c,
        Err(_) => {
            RUNTIME_ERROR.with(|cell| {
                *cell.borrow_mut() = Some(RuntimeError::Undefined);
            });
            return error_poison_ptr() as i64;
        }
    };
    // Leak the CString deliberately; the generated code owns the pointer.
    let ptr = c_str.into_raw();
    ptr as i64
}
1455
/// Formats a double the way Haskell's `show` does for `Double`:
/// `NaN`/`Infinity`/`-Infinity` spelled out, signed zero as `0.0`/`-0.0`,
/// fixed notation with a mandatory fractional part for magnitudes in
/// `[0.1, 1e7)`, and scientific notation (`d.dddde±n`) otherwise.
fn haskell_show_double(d: f64) -> String {
    if d.is_nan() {
        return "NaN".to_string();
    }
    if d.is_infinite() {
        return if d > 0.0 { "Infinity" } else { "-Infinity" }.to_string();
    }
    if d == 0.0 {
        return if d.is_sign_negative() { "-0.0" } else { "0.0" }.to_string();
    }
    let abs = d.abs();
    if (0.1..1.0e7).contains(&abs) {
        // Fixed notation; Haskell always prints a fractional part, so append
        // ".0" when Rust's shortest representation is integral.
        let s = format!("{}", d);
        if s.contains('.') {
            s
        } else {
            format!("{}.0", s)
        }
    } else {
        // Scientific notation. Rust's `{:e}` drops the decimal point when the
        // mantissa is integral ("1e7"), but Haskell's `show` always prints one
        // ("1.0e7") — insert ".0" before the exponent when it is missing.
        let s = format!("{:e}", d);
        match s.find('e') {
            Some(pos) if !s[..pos].contains('.') => {
                format!("{}.0{}", &s[..pos], &s[pos..])
            }
            _ => s,
        }
    }
}
1482
/// Defines an `extern "C"` wrapper around a unary `f64` math method.
/// The double crosses the FFI boundary as its raw bit pattern in an `i64`,
/// on both input and output, so generated code never handles `f64` directly.
macro_rules! double_math_unary {
    ($name:ident, $op:ident) => {
        pub extern "C" fn $name(bits: i64) -> i64 {
            let d = f64::from_bits(bits as u64);
            f64::$op(d).to_bits() as i64
        }
    };
}

// Bit-pattern wrappers for the transcendental functions backing the
// Haskell `Floating Double` operations.
double_math_unary!(runtime_double_exp, exp);
double_math_unary!(runtime_double_expm1, exp_m1);
double_math_unary!(runtime_double_log, ln);
double_math_unary!(runtime_double_log1p, ln_1p);
double_math_unary!(runtime_double_sin, sin);
double_math_unary!(runtime_double_cos, cos);
double_math_unary!(runtime_double_tan, tan);
double_math_unary!(runtime_double_asin, asin);
double_math_unary!(runtime_double_acos, acos);
double_math_unary!(runtime_double_atan, atan);
double_math_unary!(runtime_double_sinh, sinh);
double_math_unary!(runtime_double_cosh, cosh);
double_math_unary!(runtime_double_tanh, tanh);
double_math_unary!(runtime_double_asinh, asinh);
double_math_unary!(runtime_double_acosh, acosh);
double_math_unary!(runtime_double_atanh, atanh);
1510
/// `a ** b` for doubles passed as raw bit patterns; the result is likewise
/// returned as its bit pattern in an `i64`.
pub extern "C" fn runtime_double_power(bits_a: i64, bits_b: i64) -> i64 {
    let base = f64::from_bits(bits_a as u64);
    let exponent = f64::from_bits(bits_b as u64);
    let result = base.powf(exponent);
    result.to_bits() as i64
}
1516
1517pub fn host_fn_symbols() -> Vec<(&'static str, *const u8)> {
1518 vec![
1519 ("gc_trigger", gc_trigger as *const u8),
1520 ("runtime_oom", runtime_oom as *const u8),
1521 (
1522 "runtime_blackhole_trap",
1523 runtime_blackhole_trap as *const u8,
1524 ),
1525 (
1526 "runtime_bad_thunk_state_trap",
1527 runtime_bad_thunk_state_trap as *const u8,
1528 ),
1529 ("heap_force", heap_force as *const u8),
1530 ("unresolved_var_trap", unresolved_var_trap as *const u8),
1531 ("runtime_error", runtime_error as *const u8),
1532 (
1533 "runtime_error_with_msg",
1534 runtime_error_with_msg as *const u8,
1535 ),
1536 ("debug_app_check", debug_app_check as *const u8),
1537 ("trampoline_resolve", trampoline_resolve as *const u8),
1538 (
1539 "runtime_new_byte_array",
1540 runtime_new_byte_array as *const u8,
1541 ),
1542 (
1543 "runtime_copy_addr_to_byte_array",
1544 runtime_copy_addr_to_byte_array as *const u8,
1545 ),
1546 (
1547 "runtime_set_byte_array",
1548 runtime_set_byte_array as *const u8,
1549 ),
1550 (
1551 "runtime_shrink_byte_array",
1552 runtime_shrink_byte_array as *const u8,
1553 ),
1554 (
1555 "runtime_resize_byte_array",
1556 runtime_resize_byte_array as *const u8,
1557 ),
1558 (
1559 "runtime_copy_byte_array",
1560 runtime_copy_byte_array as *const u8,
1561 ),
1562 (
1563 "runtime_compare_byte_arrays",
1564 runtime_compare_byte_arrays as *const u8,
1565 ),
1566 ("runtime_strlen", runtime_strlen as *const u8),
1567 (
1568 "runtime_decode_double_mantissa",
1569 runtime_decode_double_mantissa as *const u8,
1570 ),
1571 (
1572 "runtime_decode_double_exponent",
1573 runtime_decode_double_exponent as *const u8,
1574 ),
1575 (
1576 "runtime_text_measure_off",
1577 runtime_text_measure_off as *const u8,
1578 ),
1579 ("runtime_text_memchr", runtime_text_memchr as *const u8),
1580 ("runtime_text_reverse", runtime_text_reverse as *const u8),
1581 (
1582 "runtime_new_boxed_array",
1583 runtime_new_boxed_array as *const u8,
1584 ),
1585 (
1586 "runtime_clone_boxed_array",
1587 runtime_clone_boxed_array as *const u8,
1588 ),
1589 (
1590 "runtime_copy_boxed_array",
1591 runtime_copy_boxed_array as *const u8,
1592 ),
1593 (
1594 "runtime_shrink_boxed_array",
1595 runtime_shrink_boxed_array as *const u8,
1596 ),
1597 (
1598 "runtime_cas_boxed_array",
1599 runtime_cas_boxed_array as *const u8,
1600 ),
1601 ("runtime_case_trap", runtime_case_trap as *const u8),
1602 (
1603 "runtime_show_double_addr",
1604 runtime_show_double_addr as *const u8,
1605 ),
1606 ("runtime_double_exp", runtime_double_exp as *const u8),
1608 ("runtime_double_expm1", runtime_double_expm1 as *const u8),
1609 ("runtime_double_log", runtime_double_log as *const u8),
1610 ("runtime_double_log1p", runtime_double_log1p as *const u8),
1611 ("runtime_double_sin", runtime_double_sin as *const u8),
1612 ("runtime_double_cos", runtime_double_cos as *const u8),
1613 ("runtime_double_tan", runtime_double_tan as *const u8),
1614 ("runtime_double_asin", runtime_double_asin as *const u8),
1615 ("runtime_double_acos", runtime_double_acos as *const u8),
1616 ("runtime_double_atan", runtime_double_atan as *const u8),
1617 ("runtime_double_sinh", runtime_double_sinh as *const u8),
1618 ("runtime_double_cosh", runtime_double_cosh as *const u8),
1619 ("runtime_double_tanh", runtime_double_tanh as *const u8),
1620 ("runtime_double_asinh", runtime_double_asinh as *const u8),
1621 ("runtime_double_acosh", runtime_double_acosh as *const u8),
1622 ("runtime_double_atanh", runtime_double_atanh as *const u8),
1623 ("runtime_double_power", runtime_double_power as *const u8),
1624 ]
1625}
1626
// Unit tests for the byte-array, UTF-8 text, double-decoding, diagnostics,
// and thunk-forcing primitives defined above.
#[cfg(test)]
mod tests {
    use super::*;
    use std::alloc::{dealloc, Layout};

    /// Frees a byte array produced by `runtime_new_byte_array`: the leading
    /// u64 is the payload size, so the allocation is 8 header bytes plus the
    /// payload, 8-aligned.
    unsafe fn free_byte_array(ptr: i64) {
        let old_ptr = ptr as *mut u8;
        let size = *(old_ptr as *const u64) as usize;
        let layout = Layout::from_size_align(8 + size, 8).unwrap();
        dealloc(old_ptr, layout);
    }

    #[test]
    fn test_runtime_new_byte_array() {
        unsafe {
            let ba = runtime_new_byte_array(10);
            assert_ne!(ba, 0);
            // Header records the payload size; payload starts zeroed.
            assert_eq!(*(ba as *const u64), 10);
            let bytes = std::slice::from_raw_parts((ba as *const u8).add(8), 10);
            assert!(bytes.iter().all(|&b| b == 0));
            free_byte_array(ba);
        }
    }

    #[test]
    fn test_runtime_copy_addr_to_byte_array() {
        unsafe {
            let ba = runtime_new_byte_array(10);
            let src = b"hello";
            runtime_copy_addr_to_byte_array(src.as_ptr() as i64, ba, 2, 5);
            let bytes = std::slice::from_raw_parts((ba as *const u8).add(8), 10);
            // Copy lands at offset 2; surrounding bytes stay zero.
            assert_eq!(&bytes[2..7], b"hello");
            assert_eq!(bytes[0], 0);
            assert_eq!(bytes[1], 0);
            assert_eq!(bytes[7], 0);
            free_byte_array(ba);
        }
    }

    #[test]
    fn test_runtime_set_byte_array() {
        unsafe {
            let ba = runtime_new_byte_array(10);
            // Fill 4 bytes starting at offset 3 with 0xFF.
            runtime_set_byte_array(ba, 3, 4, 0xFF);
            let bytes = std::slice::from_raw_parts((ba as *const u8).add(8), 10);
            assert_eq!(bytes[2], 0);
            assert_eq!(bytes[3], 0xFF);
            assert_eq!(bytes[4], 0xFF);
            assert_eq!(bytes[5], 0xFF);
            assert_eq!(bytes[6], 0xFF);
            assert_eq!(bytes[7], 0);
            free_byte_array(ba);
        }
    }

    #[test]
    fn test_runtime_shrink_byte_array() {
        unsafe {
            let ba = runtime_new_byte_array(10);
            runtime_shrink_byte_array(ba, 5);
            assert_eq!(*(ba as *const u64), 5);
            // Shrink only rewrites the header, so free with the original
            // 10-byte layout rather than free_byte_array (which would read 5).
            let layout = Layout::from_size_align(8 + 10, 8).unwrap();
            dealloc(ba as *mut u8, layout);
        }
    }

    #[test]
    fn test_runtime_resize_byte_array_grow() {
        unsafe {
            let ba = runtime_new_byte_array(5);
            let bytes = std::slice::from_raw_parts_mut((ba as *mut u8).add(8), 5);
            bytes.copy_from_slice(b"abcde");

            // Growing preserves the old contents and zero-fills the tail.
            let new_ba = runtime_resize_byte_array(ba, 10);
            assert_eq!(*(new_ba as *const u64), 10);
            let new_bytes = std::slice::from_raw_parts((new_ba as *const u8).add(8), 10);
            assert_eq!(&new_bytes[0..5], b"abcde");
            assert_eq!(&new_bytes[5..10], &[0, 0, 0, 0, 0]);

            free_byte_array(new_ba);
        }
    }

    #[test]
    fn test_runtime_resize_byte_array_shrink() {
        unsafe {
            let ba = runtime_new_byte_array(10);
            let bytes = std::slice::from_raw_parts_mut((ba as *mut u8).add(8), 10);
            bytes.copy_from_slice(b"0123456789");

            // Shrinking keeps only the prefix.
            let new_ba = runtime_resize_byte_array(ba, 5);
            assert_eq!(*(new_ba as *const u64), 5);
            let new_bytes = std::slice::from_raw_parts((new_ba as *const u8).add(8), 5);
            assert_eq!(new_bytes, b"01234");

            free_byte_array(new_ba);
        }
    }

    #[test]
    fn test_runtime_copy_byte_array() {
        unsafe {
            let ba1 = runtime_new_byte_array(10);
            let ba2 = runtime_new_byte_array(10);

            let bytes1 = std::slice::from_raw_parts_mut((ba1 as *mut u8).add(8), 10);
            bytes1.copy_from_slice(b"abcdefghij");

            // Copy 3 bytes from ba1[2..] into ba2[4..].
            runtime_copy_byte_array(ba1, 2, ba2, 4, 3);

            let bytes2 = std::slice::from_raw_parts((ba2 as *const u8).add(8), 10);
            assert_eq!(&bytes2[4..7], b"cde");

            free_byte_array(ba1);
            free_byte_array(ba2);
        }
    }

    #[test]
    fn test_runtime_copy_byte_array_overlap() {
        unsafe {
            let ba = runtime_new_byte_array(10);
            let bytes = std::slice::from_raw_parts_mut((ba as *mut u8).add(8), 10);
            bytes.copy_from_slice(b"0123456789");

            // Overlapping forward copy must behave like memmove.
            runtime_copy_byte_array(ba, 0, ba, 2, 5);

            assert_eq!(bytes, b"0101234789");

            free_byte_array(ba);
        }
    }

    #[test]
    fn test_runtime_compare_byte_arrays() {
        unsafe {
            let ba1 = runtime_new_byte_array(5);
            let ba2 = runtime_new_byte_array(5);

            std::ptr::copy_nonoverlapping(b"apple".as_ptr(), (ba1 as *mut u8).add(8), 5);
            std::ptr::copy_nonoverlapping(b"apply".as_ptr(), (ba2 as *mut u8).add(8), 5);

            // "appl" == "appl"
            assert_eq!(runtime_compare_byte_arrays(ba1, 0, ba2, 0, 4), 0);
            // 'e' < 'y'
            assert_eq!(runtime_compare_byte_arrays(ba1, 0, ba2, 0, 5), -1);
            // 'y' > 'e'
            assert_eq!(runtime_compare_byte_arrays(ba2, 0, ba1, 0, 5), 1);
            free_byte_array(ba1);
            free_byte_array(ba2);
        }
    }

    #[test]
    fn test_runtime_strlen() {
        let s = b"hello\0world\0";
        unsafe {
            assert_eq!(runtime_strlen(s.as_ptr() as i64), 5);
            // Starting after the first NUL measures the second string.
            assert_eq!(runtime_strlen(s.as_ptr().add(6) as i64), 5);
        }
    }

    #[test]
    fn test_measure_off_ascii_length() {
        let s = b"hello";
        // cnt > available chars: negated char count is returned.
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, i64::MAX);
        assert_eq!(r, -5);
    }

    #[test]
    fn test_measure_off_ascii_take() {
        let s = b"hello";
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, 3);
        assert_eq!(r, 3);
    }

    #[test]
    fn test_measure_off_ascii_take_all() {
        let s = b"hello";
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, 5);
        assert_eq!(r, 5);
    }

    #[test]
    fn test_measure_off_ascii_take_more() {
        let s = b"hello";
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, 10);
        assert_eq!(r, -5);
    }

    #[test]
    fn test_measure_off_ascii_drop() {
        let s = b"hello";
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, 2);
        assert_eq!(r, 2);
    }

    #[test]
    fn test_measure_off_with_offset() {
        let s = b"hello";
        // Only the 3-byte window starting at offset 2 is scanned.
        let r = runtime_text_measure_off(s.as_ptr() as i64, 2, 3, i64::MAX);
        assert_eq!(r, -3);
    }

    #[test]
    fn test_measure_off_empty() {
        let s = b"hello";
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 0, 5), 0);
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 5, 0), 0);
    }

    #[test]
    fn test_measure_off_utf8_length() {
        let s = "café".as_bytes();
        // 'é' is two bytes, so 5 bytes hold 4 characters.
        assert_eq!(s.len(), 5);
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, i64::MAX);
        assert_eq!(r, -4);
    }

    #[test]
    fn test_measure_off_utf8_take() {
        let s = "café".as_bytes();
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, 3);
        assert_eq!(r, 3);
    }

    #[test]
    fn test_measure_off_utf8_take_past_multibyte() {
        let s = "café".as_bytes();
        // The 4th char is 2 bytes, so the boundary lands at byte 5.
        let r = runtime_text_measure_off(s.as_ptr() as i64, 0, 5, 4);
        assert_eq!(r, 5);
    }

    #[test]
    fn test_measure_off_multibyte_chars() {
        let s = "λ😀x".as_bytes();
        // λ = 2 bytes, 😀 = 4 bytes, x = 1 byte.
        assert_eq!(s.len(), 7);
        assert_eq!(
            runtime_text_measure_off(s.as_ptr() as i64, 0, 7, i64::MAX),
            -3
        );
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 7, 1), 2);
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 7, 2), 6);
        assert_eq!(
            runtime_text_measure_off(s.as_ptr() as i64, 2, 5, i64::MAX),
            -2
        );
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 2, 5, 1), 4);
    }

    #[test]
    fn test_measure_off_all_widths() {
        let s = "Aλ文😀".as_bytes();
        // Widths: A=1, λ=2, 文=3, 😀=4 → boundaries at 1, 3, 6, 10.
        assert_eq!(s.len(), 10);
        assert_eq!(
            runtime_text_measure_off(s.as_ptr() as i64, 0, 10, i64::MAX),
            -4
        );
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 10, 1), 1);
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 10, 2), 3);
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 10, 3), 6);
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 0, 10, 4), 10);
        // Offset 1 skips 'A'; two chars (λ, 文) end at byte 5 of the window.
        assert_eq!(runtime_text_measure_off(s.as_ptr() as i64, 1, 9, 2), 5);
    }

    #[test]
    fn test_runtime_text_memchr() {
        let s = b"abacaba";
        assert_eq!(runtime_text_memchr(s.as_ptr() as i64, 0, 7, b'a' as i64), 0);
        // Result is relative to the offset.
        assert_eq!(runtime_text_memchr(s.as_ptr() as i64, 1, 6, b'a' as i64), 1);
        assert_eq!(
            runtime_text_memchr(s.as_ptr() as i64, 0, 7, b'z' as i64),
            -1
        );
    }

    #[test]
    fn test_reverse_ascii() {
        let src = b"hello";
        let mut dest = [0u8; 5];
        runtime_text_reverse(dest.as_mut_ptr() as i64, src.as_ptr() as i64, 0, 5);
        assert_eq!(&dest, b"olleh");
    }

    #[test]
    fn test_reverse_ascii_with_offset() {
        let src = b"XXhello";
        let mut dest = [0u8; 5];
        runtime_text_reverse(dest.as_mut_ptr() as i64, src.as_ptr() as i64, 2, 5);
        assert_eq!(&dest, b"olleh");
    }

    #[test]
    fn test_reverse_utf8() {
        // Characters are reversed; bytes within each character are not.
        let src = "λ😀".as_bytes();
        let mut dest = [0u8; 6];
        runtime_text_reverse(dest.as_mut_ptr() as i64, src.as_ptr() as i64, 0, 6);
        assert_eq!(std::str::from_utf8(&dest).unwrap(), "😀λ");
    }

    #[test]
    fn test_reverse_all_widths() {
        let src = "Aλ文😀".as_bytes();
        let mut dest = [0u8; 10];
        runtime_text_reverse(dest.as_mut_ptr() as i64, src.as_ptr() as i64, 0, 10);
        assert_eq!(std::str::from_utf8(&dest).unwrap(), "😀文λA");
    }

    #[test]
    fn test_reverse_single_char() {
        let src = b"x";
        let mut dest = [0u8; 1];
        runtime_text_reverse(dest.as_mut_ptr() as i64, src.as_ptr() as i64, 0, 1);
        assert_eq!(&dest, b"x");
    }

    #[test]
    fn test_memchr_found() {
        let s = b"hello:world";
        assert_eq!(
            runtime_text_memchr(s.as_ptr() as i64, 0, 11, b':' as i64),
            5
        );
    }

    #[test]
    fn test_memchr_not_found() {
        let s = b"hello";
        assert_eq!(
            runtime_text_memchr(s.as_ptr() as i64, 0, 5, b':' as i64),
            -1
        );
    }

    #[test]
    fn test_memchr_with_offset() {
        let s = b"a:b:c";
        // Index 1 is relative to offset 2 (absolute byte 3).
        assert_eq!(runtime_text_memchr(s.as_ptr() as i64, 2, 3, b':' as i64), 1);
    }

    #[test]
    fn test_memchr_first_byte() {
        let s = b":hello";
        assert_eq!(runtime_text_memchr(s.as_ptr() as i64, 0, 6, b':' as i64), 0);
    }

    #[test]
    fn test_memchr_last_byte() {
        let s = b"hello:";
        assert_eq!(runtime_text_memchr(s.as_ptr() as i64, 0, 6, b':' as i64), 5);
    }

    #[test]
    fn test_decode_double_3_14() {
        // Round-trip: mantissa * 2^exponent reconstructs the input exactly.
        let (m, e) = decode_double_int64(3.14);
        assert_eq!(m as f64 * (2.0f64).powi(e as i32), 3.14);
    }

    #[test]
    fn test_decode_double_1_0() {
        let (m, e) = decode_double_int64(1.0);
        assert_eq!((m, e), (1, 0));
    }

    #[test]
    fn test_decode_double_42_0() {
        let (m, e) = decode_double_int64(42.0);
        assert_eq!(m as f64 * (2.0f64).powi(e as i32), 42.0);
    }

    #[test]
    fn test_decode_double_zero() {
        assert_eq!(decode_double_int64(0.0), (0, 0));
    }

    #[test]
    fn test_decode_double_negative() {
        // -1.5 = -3 * 2^-1 with an odd mantissa.
        let (m, e) = decode_double_int64(-1.5);
        assert_eq!((m, e), (-3, -1));
    }

    #[test]
    fn test_decode_double_runtime_mantissa() {
        // The extern wrappers agree with decode_double_int64.
        let bits = 3.14f64.to_bits() as i64;
        let m = runtime_decode_double_mantissa(bits);
        let e = runtime_decode_double_exponent(bits);
        assert_eq!(m as f64 * (2.0f64).powi(e as i32), 3.14);
    }

    #[test]
    fn test_diagnostics() {
        // Drain first so leftovers from other tests can't interfere.
        let _ = drain_diagnostics();
        push_diagnostic("test1".to_string());
        push_diagnostic("test2".to_string());
        let d = drain_diagnostics();
        assert_eq!(d, vec!["test1".to_string(), "test2".to_string()]);
        // Draining empties the queue.
        let d2 = drain_diagnostics();
        assert!(d2.is_empty());
    }

    // GC trigger stub for VMContext in the heap_force tests below.
    extern "C" fn mock_gc_trigger(_vmctx: *mut VMContext) {}

    thread_local! {
        // Value returned by test_thunk_entry, set per-test.
        static TEST_RESULT: Cell<*mut u8> = const { Cell::new(std::ptr::null_mut()) };
    }

    // Thunk entry point that returns whatever TEST_RESULT holds.
    unsafe extern "C" fn test_thunk_entry(_vmctx: *mut VMContext, _thunk: *mut u8) -> *mut u8 {
        TEST_RESULT.with(|r| r.get())
    }

    #[test]
    fn test_heap_force_thunk_unevaluated() {
        unsafe {
            let mut vmctx = VMContext {
                alloc_ptr: std::ptr::null_mut(),
                alloc_limit: std::ptr::null_mut(),
                gc_trigger: mock_gc_trigger,
                tail_callee: std::ptr::null_mut(),
                tail_arg: std::ptr::null_mut(),
            };

            // Literal heap object the thunk's code will "produce".
            let mut lit_buf = [0u8; heap_layout::LIT_SIZE];
            let lit_ptr = lit_buf.as_mut_ptr();
            heap_layout::write_header(lit_ptr, layout::TAG_LIT, heap_layout::LIT_SIZE as u16);
            *(lit_ptr.add(layout::LIT_TAG_OFFSET as usize)) = 0;
            *(lit_ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = 42;

            // Unevaluated thunk whose code pointer is test_thunk_entry.
            let mut thunk_buf = [0u8; layout::THUNK_MIN_SIZE as usize];
            let thunk_ptr = thunk_buf.as_mut_ptr();
            heap_layout::write_header(thunk_ptr, layout::TAG_THUNK, layout::THUNK_MIN_SIZE as u16);
            *(thunk_ptr.add(layout::THUNK_STATE_OFFSET as usize)) = layout::THUNK_UNEVALUATED;

            TEST_RESULT.with(|r| r.set(lit_ptr));
            *(thunk_ptr.add(layout::THUNK_CODE_PTR_OFFSET as usize) as *mut usize) =
                test_thunk_entry as *const () as usize;

            // Forcing runs the entry, memoizes, and flips state to EVALUATED.
            let res = heap_force(&mut vmctx, thunk_ptr);
            assert_eq!(res, lit_ptr);
            assert_eq!(
                *(thunk_ptr.add(layout::THUNK_STATE_OFFSET as usize)),
                layout::THUNK_EVALUATED
            );
            assert_eq!(
                *(thunk_ptr.add(layout::THUNK_INDIRECTION_OFFSET as usize) as *const *mut u8),
                lit_ptr
            );
        }
    }

    #[test]
    fn test_heap_force_thunk_evaluated() {
        unsafe {
            let mut vmctx = VMContext {
                alloc_ptr: std::ptr::null_mut(),
                alloc_limit: std::ptr::null_mut(),
                gc_trigger: mock_gc_trigger,
                tail_callee: std::ptr::null_mut(),
                tail_arg: std::ptr::null_mut(),
            };

            let mut lit_buf = [0u8; 32];
            let lit_ptr = lit_buf.as_mut_ptr();
            heap_layout::write_header(lit_ptr, layout::TAG_LIT, 32);

            // Already-evaluated thunk: forcing follows the indirection.
            let mut thunk_buf = [0u8; layout::THUNK_MIN_SIZE as usize];
            let thunk_ptr = thunk_buf.as_mut_ptr();
            heap_layout::write_header(thunk_ptr, layout::TAG_THUNK, layout::THUNK_MIN_SIZE as u16);
            *(thunk_ptr.add(layout::THUNK_STATE_OFFSET as usize)) = layout::THUNK_EVALUATED;
            *(thunk_ptr.add(layout::THUNK_INDIRECTION_OFFSET as usize) as *mut *mut u8) = lit_ptr;

            let res = heap_force(&mut vmctx, thunk_ptr);
            assert_eq!(res, lit_ptr);
        }
    }

    #[test]
    fn test_heap_force_thunk_blackhole() {
        unsafe {
            let mut vmctx = VMContext {
                alloc_ptr: std::ptr::null_mut(),
                alloc_limit: std::ptr::null_mut(),
                gc_trigger: mock_gc_trigger,
                tail_callee: std::ptr::null_mut(),
                tail_arg: std::ptr::null_mut(),
            };

            // Clear any pending error from earlier tests on this thread.
            RUNTIME_ERROR.with(|cell| *cell.borrow_mut() = None);

            // Blackholed thunk: forcing must report BlackHole and poison.
            let mut thunk_buf = [0u8; layout::THUNK_MIN_SIZE as usize];
            let thunk_ptr = thunk_buf.as_mut_ptr();
            heap_layout::write_header(thunk_ptr, layout::TAG_THUNK, layout::THUNK_MIN_SIZE as u16);
            *(thunk_ptr.add(layout::THUNK_STATE_OFFSET as usize)) = layout::THUNK_BLACKHOLE;

            let res = heap_force(&mut vmctx, thunk_ptr);
            assert_eq!(res, error_poison_ptr());

            let err = take_runtime_error().expect("Should have flagged error");
            assert!(matches!(err, RuntimeError::BlackHole));
        }
    }

    #[test]
    fn test_heap_force_thunk_null_code_ptr() {
        unsafe {
            let mut vmctx = VMContext {
                alloc_ptr: std::ptr::null_mut(),
                alloc_limit: std::ptr::null_mut(),
                gc_trigger: mock_gc_trigger,
                tail_callee: std::ptr::null_mut(),
                tail_arg: std::ptr::null_mut(),
            };

            RUNTIME_ERROR.with(|cell| *cell.borrow_mut() = None);

            // Unevaluated thunk with a null code pointer → NullFunPtr.
            let mut thunk_buf = [0u8; layout::THUNK_MIN_SIZE as usize];
            let thunk_ptr = thunk_buf.as_mut_ptr();
            heap_layout::write_header(thunk_ptr, layout::TAG_THUNK, layout::THUNK_MIN_SIZE as u16);
            *(thunk_ptr.add(layout::THUNK_STATE_OFFSET as usize)) = layout::THUNK_UNEVALUATED;
            *(thunk_ptr.add(layout::THUNK_CODE_PTR_OFFSET as usize) as *mut usize) = 0;

            let res = heap_force(&mut vmctx, thunk_ptr);
            assert_eq!(res, error_poison_ptr());
            let err = take_runtime_error().expect("Should have flagged error");
            assert!(matches!(err, RuntimeError::NullFunPtr));
        }
    }

    #[test]
    fn test_heap_force_thunk_bad_state() {
        unsafe {
            let mut vmctx = VMContext {
                alloc_ptr: std::ptr::null_mut(),
                alloc_limit: std::ptr::null_mut(),
                gc_trigger: mock_gc_trigger,
                tail_callee: std::ptr::null_mut(),
                tail_arg: std::ptr::null_mut(),
            };

            RUNTIME_ERROR.with(|cell| *cell.borrow_mut() = None);

            // 255 is not a valid thunk state → BadThunkState(255).
            let mut thunk_buf = [0u8; layout::THUNK_MIN_SIZE as usize];
            let thunk_ptr = thunk_buf.as_mut_ptr();
            heap_layout::write_header(thunk_ptr, layout::TAG_THUNK, layout::THUNK_MIN_SIZE as u16);
            *(thunk_ptr.add(layout::THUNK_STATE_OFFSET as usize)) = 255;
            let res = heap_force(&mut vmctx, thunk_ptr);
            assert_eq!(res, error_poison_ptr());
            let err = take_runtime_error().expect("Should have flagged error");
            assert!(matches!(err, RuntimeError::BadThunkState(255)));
        }
    }
}
2233
/// Diagnostic trap called when a `case` scrutinee matches no alternative:
/// dumps what the scrutinee actually is to stderr, flags a runtime error,
/// and returns the poison pointer.
///
/// `alt_tags`, when non-null, points at `num_alts` u64 constructor tags the
/// generated code expected to see.
pub extern "C" fn runtime_case_trap(scrut_ptr: i64, num_alts: i64, alt_tags: i64) -> *mut u8 {
    // If an error is already pending, stay silent and just propagate poison.
    let has_error = RUNTIME_ERROR.with(|cell| cell.borrow().is_some());
    if has_error {
        return error_poison_ptr();
    }

    let ptr = scrut_ptr as *const u8;

    if !ptr.is_null()
        && unsafe { tidepool_heap::layout::read_tag(ptr) } == tidepool_heap::layout::TAG_CLOSURE
    {
        let code_ptr =
            unsafe { *(ptr.add(tidepool_heap::layout::CLOSURE_CODE_PTR_OFFSET) as *const usize) };
        if code_ptr == poison_trampoline_lazy as *const () as usize
            || code_ptr == poison_trampoline_lazy_msg as *const () as usize
        {
            // The scrutinee is a poison trampoline closure: invoke it so it
            // records its own (more precise) error, then propagate poison.
            unsafe {
                let func: unsafe extern "C" fn(*mut VMContext, *mut u8, *mut u8) -> *mut u8 =
                    std::mem::transmute(code_ptr);
                func(std::ptr::null_mut(), ptr as *mut u8, std::ptr::null_mut());
            }
            return error_poison_ptr();
        }
    }

    use std::io::Write;
    if check_ptr_invalid(scrut_ptr as *const u8, "runtime_case_trap") {
        return error_poison_ptr();
    }
    // Human-readable name for the object's heap tag byte.
    let tag_byte = unsafe { *ptr };
    let tag_name = match tag_byte {
        0 => "Closure",
        1 => "Thunk",
        2 => "Con",
        3 => "Lit",
        0xFF => "Forwarded(GC bug!)",
        _ => "UNKNOWN",
    };

    // Constructor tags the generated case expression was prepared to handle.
    let expected: Vec<u64> = if num_alts > 0 && alt_tags != 0 {
        (0..num_alts as usize)
            .map(|i| unsafe { *((alt_tags as *const u64).add(i)) })
            .collect()
    } else {
        vec![]
    };

    // Raw hex dump of the object's first 32 bytes for post-mortem debugging.
    let raw_bytes: Vec<u8> = (0..32).map(|i| unsafe { *ptr.add(i) }).collect();
    let mut stderr = std::io::stderr().lock();
    let _ = writeln!(stderr, "[CASE TRAP] raw bytes: {:02x?}", raw_bytes);

    // Tag-specific decode of the header fields.
    if tag_byte == layout::TAG_CON {
        let con_tag = unsafe { *(ptr.add(layout::CON_TAG_OFFSET as usize) as *const u64) };
        let num_fields =
            unsafe { *(ptr.add(layout::CON_NUM_FIELDS_OFFSET as usize) as *const u16) };
        let _ = writeln!(
            stderr,
            "[CASE TRAP] Con: con_tag={:#x}, num_fields={}, expected_tags={:?}",
            con_tag, num_fields, expected
        );
    } else if tag_byte == layout::TAG_LIT {
        let lit_tag = unsafe { *(ptr.add(layout::LIT_TAG_OFFSET as usize) as *const u64) };
        let value = unsafe { *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *const u64) };
        let _ = writeln!(
            stderr,
            "[CASE TRAP] Lit: lit_tag={:#x}, value={:#x}, expected_tags={:?}",
            lit_tag, value, expected
        );
    } else if tag_byte == layout::TAG_CLOSURE {
        let code_ptr =
            unsafe { *(ptr.add(layout::CLOSURE_CODE_PTR_OFFSET as usize) as *const u64) };
        let num_captured =
            unsafe { *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET as usize) as *const u16) };
        let _ = writeln!(
            stderr,
            "[CASE TRAP] Closure: code_ptr={:#x}, num_captured={}, expected_tags={:?}",
            code_ptr, num_captured, expected
        );
    } else {
        let _ = writeln!(
            stderr,
            "[CASE TRAP] tag_byte={} ({}), expected_tags={:?}",
            tag_byte, tag_name, expected
        );
    }
    let _ = stderr.flush();
    drop(stderr);
    // Flag the error last so the diagnostics above always make it out.
    RUNTIME_ERROR.with(|cell| {
        *cell.borrow_mut() = Some(RuntimeError::Undefined);
    });
    error_poison_ptr()
}