// tidepool_codegen/heap_bridge.rs
//! Conversion between JIT nursery heap objects and interpreter `Value`s.
1use crate::context::VMContext;
2use crate::layout::{
3    self, LIT_TAG_ADDR, LIT_TAG_ARRAY, LIT_TAG_BYTEARRAY, LIT_TAG_CHAR, LIT_TAG_DOUBLE,
4    LIT_TAG_FLOAT, LIT_TAG_INT, LIT_TAG_SMALLARRAY, LIT_TAG_STRING, LIT_TAG_WORD,
5};
6use std::fmt;
7use tidepool_eval::value::Value;
8use tidepool_heap::layout as heap_layout;
9use tidepool_repr::{DataConId, Literal};
10
/// Failure modes for heap-object <-> `Value` conversion.
#[derive(Debug)]
pub enum BridgeError {
    /// Heap object header tag matched none of the known TAG_* constants.
    UnexpectedHeapTag(u8),
    /// A Lit object carried a literal tag this bridge does not recognize.
    UnexpectedLitTag(u8),
    /// A null pointer was found where a heap object was expected.
    NullPointer,
    /// Bump allocation failed: the nursery has no room left.
    NurseryExhausted,
    /// A Con object reported more fields than `MAX_FIELDS`.
    TooManyFields { count: usize },
    /// A string/bytearray/array payload exceeded `MAX_DATA_SIZE` bytes.
    DataTooLarge { len: usize },
    /// Traversal exceeded `MAX_DEPTH` nesting levels (possibly a cycle).
    TooDeep,
    /// A thunk was still unevaluated and no VMContext was available to force it.
    UnevaluatedThunk,
    /// A thunk was in the blackhole state (it is in the middle of forcing itself).
    BlackHole,
    /// Thunk state byte was not one of the known states.
    UnknownThunkState(u8),
    /// Internal invariant violation (e.g. a poisoned ByteArray mutex).
    InternalError(String),
}
25
26impl fmt::Display for BridgeError {
27    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
28        match self {
29            BridgeError::UnexpectedHeapTag(t) => write!(f, "unexpected heap tag: {}", t),
30            BridgeError::UnexpectedLitTag(t) => write!(f, "unexpected lit tag: {}", t),
31            BridgeError::NullPointer => write!(f, "null pointer"),
32            BridgeError::NurseryExhausted => write!(f, "nursery exhausted"),
33            BridgeError::TooManyFields { count } => write!(f, "too many Con fields: {}", count),
34            BridgeError::DataTooLarge { len } => write!(f, "data too large: {} bytes", len),
35            BridgeError::TooDeep => write!(f, "heap structure too deep (>10000 levels)"),
36            BridgeError::UnevaluatedThunk => write!(f, "unevaluated thunk"),
37            BridgeError::BlackHole => write!(f, "blackhole (thunk forcing itself)"),
38            BridgeError::UnknownThunkState(state) => write!(f, "unknown thunk state: {}", state),
39            BridgeError::InternalError(msg) => write!(f, "internal error: {}", msg),
40        }
41    }
42}
43
// Marker impl: the Debug derive and Display impl above satisfy std::error::Error.
impl std::error::Error for BridgeError {}
45
/// Convert a heap-allocated object to a Value.
///
/// Unevaluated thunks encountered during traversal are reported as
/// [`BridgeError::UnevaluatedThunk`]; use [`heap_to_value_forcing`] if they
/// should be evaluated instead.
///
/// # Safety
///
/// `ptr` must point to a valid HeapObject allocated by the JIT nursery.
pub unsafe fn heap_to_value(ptr: *const u8) -> Result<Value, BridgeError> {
    // SAFETY: Caller guarantees ptr is a valid HeapObject from the JIT nursery.
    // A null vmctx tells the worker not to force thunks.
    heap_to_value_inner(ptr, 0, std::ptr::null_mut())
}
55
/// Convert a heap-allocated object to a Value, forcing any unevaluated thunks
/// encountered during traversal.
///
/// Unlike [`heap_to_value`], the non-null `vmctx` lets the traversal call
/// `heap_force` on thunks that are not yet in WHNF.
///
/// # Safety
///
/// `ptr` must point to a valid HeapObject allocated by the JIT nursery.
/// `vmctx` must point to a valid VMContext (required for forcing thunks).
pub unsafe fn heap_to_value_forcing(
    ptr: *const u8,
    vmctx: *mut VMContext,
) -> Result<Value, BridgeError> {
    // SAFETY: Caller guarantees ptr is a valid HeapObject and vmctx is a valid VMContext.
    heap_to_value_inner(ptr, 0, vmctx)
}
70
/// Maximum recursion depth for heap traversal; deeper (or cyclic) structures
/// are rejected with `BridgeError::TooDeep`.
const MAX_DEPTH: usize = 10_000;
/// Upper bound on Con field counts accepted during conversion.
const MAX_FIELDS: usize = 1024;
/// Upper bound on string/bytearray/array payload lengths, in bytes.
const MAX_DATA_SIZE: usize = 64 * 1024 * 1024; // 64MB
74
/// Recursive worker behind [`heap_to_value`] and [`heap_to_value_forcing`].
///
/// Walks the heap object graph starting at `ptr` and translates each node
/// into a `Value`. `depth` bounds the recursion (`MAX_DEPTH`); `vmctx` may be
/// null, in which case unevaluated thunks become errors instead of being
/// forced via `heap_force`.
unsafe fn heap_to_value_inner(
    ptr: *const u8,
    depth: usize,
    vmctx: *mut VMContext,
) -> Result<Value, BridgeError> {
    // SAFETY: ptr is a valid HeapObject from the JIT nursery (checked non-null below).
    // All field reads use known layout offsets. Recursion depth is bounded by MAX_DEPTH.
    if ptr.is_null() {
        return Err(BridgeError::NullPointer);
    }
    if depth > MAX_DEPTH {
        return Err(BridgeError::TooDeep);
    }

    // The first byte of every heap object is its kind tag.
    let tag = *ptr;
    match tag {
        t if t == layout::TAG_LIT => {
            let lit_tag = *ptr.add(layout::LIT_TAG_OFFSET as usize) as i64;
            // raw_value is either an immediate payload (ints, chars, float
            // bits) or a pointer (strings, byte arrays, arrays), depending on
            // lit_tag.
            let raw_value = *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *const i64);

            match lit_tag {
                x if x == LIT_TAG_INT => Ok(Value::Lit(Literal::LitInt(raw_value))),
                x if x == LIT_TAG_WORD => Ok(Value::Lit(Literal::LitWord(raw_value as u64))),
                x if x == LIT_TAG_CHAR => Ok(Value::Lit(Literal::LitChar(
                    // Invalid scalar values degrade to NUL rather than erroring.
                    char::from_u32(raw_value as u32).unwrap_or('\0'),
                ))),
                x if x == LIT_TAG_FLOAT => Ok(Value::Lit(Literal::LitFloat(raw_value as u64))),
                x if x == LIT_TAG_DOUBLE => Ok(Value::Lit(Literal::LitDouble(raw_value as u64))),
                x if x == LIT_TAG_STRING => {
                    // LitString: value is pointer to [len: u64][bytes...]
                    // Use read_unaligned because JIT data sections may not be 8-byte aligned
                    let data_ptr = raw_value as *const u8;
                    if data_ptr.is_null() {
                        return Err(BridgeError::NullPointer);
                    }
                    let len = std::ptr::read_unaligned(data_ptr as *const u64) as usize;
                    if len > MAX_DATA_SIZE {
                        return Err(BridgeError::DataTooLarge { len });
                    }
                    let bytes_ptr = data_ptr.add(8);
                    let bytes = std::slice::from_raw_parts(bytes_ptr, len).to_vec();
                    Ok(Value::Lit(Literal::LitString(bytes)))
                }
                x if x == LIT_TAG_ADDR => {
                    // Addr# — intermediate value, shouldn't normally be a final result.
                    // Wrap as empty LitString as graceful fallback.
                    Ok(Value::Lit(Literal::LitString(vec![])))
                }
                x if x == LIT_TAG_BYTEARRAY => {
                    // ByteArray# — raw pointer to [len: u64][bytes...]
                    // A null payload is treated as an empty byte array.
                    let ba_ptr = raw_value as *const u8;
                    if ba_ptr.is_null() {
                        return Ok(Value::ByteArray(std::sync::Arc::new(
                            std::sync::Mutex::new(vec![]),
                        )));
                    }
                    let len = std::ptr::read_unaligned(ba_ptr as *const u64) as usize;
                    if len > MAX_DATA_SIZE {
                        return Err(BridgeError::DataTooLarge { len });
                    }
                    let bytes_ptr = ba_ptr.add(8);
                    let bytes = std::slice::from_raw_parts(bytes_ptr, len).to_vec();
                    Ok(Value::ByteArray(std::sync::Arc::new(
                        std::sync::Mutex::new(bytes),
                    )))
                }
                x if x == LIT_TAG_SMALLARRAY || x == LIT_TAG_ARRAY => {
                    // SmallArray# (8) / Array# (9) — boxed pointer arrays
                    // Layout: [u64 length][ptr0][ptr1]...[ptrN-1]
                    let arr_ptr = raw_value as *const u8;
                    if arr_ptr.is_null() {
                        return Ok(Value::Con(DataConId(0), vec![]));
                    }
                    let len = std::ptr::read_unaligned(arr_ptr as *const u64) as usize;
                    if len > MAX_DATA_SIZE {
                        return Err(BridgeError::DataTooLarge { len });
                    }
                    let mut elems = Vec::with_capacity(len);
                    for i in 0..len {
                        // Each element slot is an 8-byte pointer after the length word.
                        let elem_ptr = *(arr_ptr.add(8 + 8 * i) as *const *const u8);
                        elems.push(heap_to_value_inner(elem_ptr, depth + 1, vmctx)?);
                    }
                    // Return as a generic Con with fields — the renderer will
                    // see the constructor names from the wrapping Con objects
                    // (e.g., Vector's Array constructor wraps this)
                    Ok(Value::Con(DataConId(0), elems))
                }
                other => Err(BridgeError::UnexpectedLitTag(other as u8)),
            }
        }
        t if t == layout::TAG_CON => {
            // Constructor application: tag word, u16 field count, then one
            // 8-byte pointer per field.
            let con_tag = *(ptr.add(layout::CON_TAG_OFFSET as usize) as *const u64);
            let num_fields =
                *(ptr.add(layout::CON_NUM_FIELDS_OFFSET as usize) as *const u16) as usize;
            if num_fields > MAX_FIELDS {
                return Err(BridgeError::TooManyFields { count: num_fields });
            }
            let mut fields = Vec::with_capacity(num_fields);
            for i in 0..num_fields {
                let field_ptr =
                    *(ptr.add(layout::CON_FIELDS_OFFSET as usize + 8 * i) as *const *const u8);
                fields.push(heap_to_value_inner(field_ptr, depth + 1, vmctx)?);
            }
            Ok(Value::Con(DataConId(con_tag), fields))
        }
        t if t == layout::TAG_THUNK => {
            let state = unsafe { *ptr.add(layout::THUNK_STATE_OFFSET as usize) };
            match state {
                layout::THUNK_EVALUATED => {
                    // Follow indirection pointer to the WHNF result
                    let target = unsafe {
                        *(ptr.add(layout::THUNK_INDIRECTION_OFFSET as usize) as *const *const u8)
                    };
                    heap_to_value_inner(target, depth + 1, vmctx)
                }
                // NOTE(review): this arm precedes the UNEVALUATED/BLACKHOLE
                // arms, so with a non-null vmctx even a blackholed or
                // unknown-state thunk is handed to heap_force — confirm
                // heap_force tolerates non-UNEVALUATED states.
                _ if !vmctx.is_null() => {
                    // Force the thunk via heap_force when vmctx is available
                    let forced = crate::host_fns::heap_force(vmctx, ptr as *mut u8);
                    if !forced.is_null() && !std::ptr::eq(forced, ptr) {
                        heap_to_value_inner(forced as *const u8, depth + 1, vmctx)
                    } else {
                        Err(BridgeError::UnevaluatedThunk)
                    }
                }
                layout::THUNK_UNEVALUATED => Err(BridgeError::UnevaluatedThunk),
                layout::THUNK_BLACKHOLE => Err(BridgeError::BlackHole),
                _ => Err(BridgeError::UnknownThunkState(state)),
            }
        }
        t if t == layout::TAG_CLOSURE => {
            // Unevaluated closure — return as opaque Value.
            // This can happen when Array# elements haven't been forced.
            // We represent it as a dummy Closure with empty env and body.
            use tidepool_eval::env::Env;
            use tidepool_repr::{CoreExpr, CoreFrame, VarId};
            let expr = CoreExpr {
                nodes: vec![CoreFrame::Var(VarId(0))],
            };
            Ok(Value::Closure(Env::new(), VarId(0), expr))
        }
        other => Err(BridgeError::UnexpectedHeapTag(other)),
    }
}
218
219/// Convert a Value to a heap-allocated object via VMContext bump allocation.
220///
221/// # Safety
222///
223/// `vmctx` must point to a valid VMContext with sufficient nursery space.
224pub unsafe fn value_to_heap(val: &Value, vmctx: &mut VMContext) -> Result<*mut u8, BridgeError> {
225    // SAFETY: Caller guarantees vmctx has a live nursery with sufficient space.
226    // All writes use known layout offsets within bump-allocated nursery memory.
227    match val {
228        Value::Lit(lit) => {
229            let ptr = bump_alloc_from_vmctx(vmctx, layout::LIT_TOTAL_SIZE as usize);
230            if ptr.is_null() {
231                return Err(BridgeError::NurseryExhausted);
232            }
233
234            match lit {
235                Literal::LitInt(n) => {
236                    heap_layout::write_header(ptr, layout::TAG_LIT, layout::LIT_TOTAL_SIZE as u16);
237                    *ptr.add(layout::LIT_TAG_OFFSET as usize) = LIT_TAG_INT as u8;
238                    *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = *n;
239                }
240                Literal::LitWord(n) => {
241                    heap_layout::write_header(ptr, layout::TAG_LIT, layout::LIT_TOTAL_SIZE as u16);
242                    *ptr.add(layout::LIT_TAG_OFFSET as usize) = LIT_TAG_WORD as u8;
243                    *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = *n as i64;
244                }
245                Literal::LitChar(c) => {
246                    heap_layout::write_header(ptr, layout::TAG_LIT, layout::LIT_TOTAL_SIZE as u16);
247                    *ptr.add(layout::LIT_TAG_OFFSET as usize) = LIT_TAG_CHAR as u8;
248                    *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = *c as i64;
249                }
250                Literal::LitFloat(bits) => {
251                    heap_layout::write_header(ptr, layout::TAG_LIT, layout::LIT_TOTAL_SIZE as u16);
252                    *ptr.add(layout::LIT_TAG_OFFSET as usize) = LIT_TAG_FLOAT as u8;
253                    *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = *bits as i64;
254                }
255                Literal::LitDouble(bits) => {
256                    heap_layout::write_header(ptr, layout::TAG_LIT, layout::LIT_TOTAL_SIZE as u16);
257                    *ptr.add(layout::LIT_TAG_OFFSET as usize) = LIT_TAG_DOUBLE as u8;
258                    *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = *bits as i64;
259                }
260                Literal::LitString(bytes) => {
261                    // Allocate string data: [len: u64][bytes...]
262                    let data_size = 8 + bytes.len();
263                    let data_ptr = bump_alloc_from_vmctx(vmctx, data_size);
264                    if data_ptr.is_null() {
265                        // Roll back the Lit object allocation to avoid dead space in nursery
266                        vmctx.alloc_ptr = ptr;
267                        return Err(BridgeError::NurseryExhausted);
268                    }
269                    *(data_ptr as *mut u64) = bytes.len() as u64;
270                    std::ptr::copy_nonoverlapping(bytes.as_ptr(), data_ptr.add(8), bytes.len());
271
272                    // Only write the header once we're sure all allocations succeeded
273                    heap_layout::write_header(ptr, layout::TAG_LIT, layout::LIT_TOTAL_SIZE as u16);
274                    *ptr.add(layout::LIT_TAG_OFFSET as usize) = LIT_TAG_STRING as u8;
275                    *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = data_ptr as i64;
276                }
277            }
278            Ok(ptr)
279        }
280        Value::Con(id, fields) => {
281            // Recursively convert fields first
282            let mut field_ptrs = Vec::with_capacity(fields.len());
283            for f in fields {
284                field_ptrs.push(value_to_heap(f, vmctx)?);
285            }
286
287            let size = 24 + 8 * fields.len();
288            let ptr = bump_alloc_from_vmctx(vmctx, size);
289            if ptr.is_null() {
290                return Err(BridgeError::NurseryExhausted);
291            }
292            heap_layout::write_header(ptr, layout::TAG_CON, size as u16);
293
294            *(ptr.add(layout::CON_TAG_OFFSET as usize) as *mut u64) = id.0;
295            *(ptr.add(layout::CON_NUM_FIELDS_OFFSET as usize) as *mut u16) = fields.len() as u16;
296
297            for (i, fp) in field_ptrs.into_iter().enumerate() {
298                *(ptr.add(layout::CON_FIELDS_OFFSET as usize + 8 * i) as *mut *mut u8) = fp;
299            }
300            Ok(ptr)
301        }
302        Value::ByteArray(bytes) => {
303            // ByteArray# stored as Lit with tag=7 (LIT_TAG_BYTEARRAY), value = ptr to [len: u64][bytes...]
304            // The byte data buffer must be allocated outside the GC nursery (via malloc)
305            // because GC doesn't track the interior pointer from the Lit wrapper to the
306            // data buffer. Using bump_alloc would place data in the nursery; after a
307            // Cheney copy, the Lit's data_ptr would point to stale fromspace memory.
308            let bytes = bytes
309                .lock()
310                .map_err(|e| BridgeError::InternalError(format!("mutex poisoned: {e}")))?;
311            let data_ptr = crate::host_fns::runtime_new_byte_array(bytes.len() as i64) as *mut u8;
312            if data_ptr.is_null() {
313                return Err(BridgeError::NurseryExhausted);
314            }
315            std::ptr::copy_nonoverlapping(bytes.as_ptr(), data_ptr.add(8), bytes.len());
316
317            let ptr = bump_alloc_from_vmctx(vmctx, layout::LIT_TOTAL_SIZE as usize);
318            if ptr.is_null() {
319                return Err(BridgeError::NurseryExhausted);
320            }
321            heap_layout::write_header(ptr, layout::TAG_LIT, layout::LIT_TOTAL_SIZE as u16);
322            *ptr.add(layout::LIT_TAG_OFFSET as usize) = LIT_TAG_BYTEARRAY as u8;
323            *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *mut i64) = data_ptr as i64;
324            Ok(ptr)
325        }
326        _ => Err(BridgeError::UnexpectedHeapTag(255)),
327    }
328}
329
330/// Bump-allocate from VMContext. Returns null if nursery is exhausted.
331///
332/// # Safety
333///
334/// `vmctx` must point to a valid VMContext with a live nursery.
335pub unsafe fn bump_alloc_from_vmctx(vmctx: &mut VMContext, size: usize) -> *mut u8 {
336    // SAFETY: Caller guarantees vmctx points to a valid VMContext with a live nursery.
337    // alloc_ptr and alloc_limit delimit the available nursery region.
338    // Align to 8 bytes
339    let aligned_size = (size + 7) & !7;
340    let ptr = vmctx.alloc_ptr;
341    let new_ptr = ptr.add(aligned_size);
342    if new_ptr as *const u8 > vmctx.alloc_limit {
343        return std::ptr::null_mut();
344    }
345    vmctx.alloc_ptr = new_ptr;
346    ptr
347}
348
#[cfg(test)]
mod tests {
    // SAFETY: Every unsafe block below hands value_to_heap/heap_to_value a
    // VMContext produced by setup_vmctx. The backing Nursery stays bound for
    // the whole test body, so all heap pointers remain valid while used.
    use super::*;
    use crate::nursery::Nursery;
    use std::sync::{Arc, Mutex};
    use tidepool_repr::{DataConId, Literal};

    extern "C" fn mock_gc_trigger(_vmctx: *mut VMContext) {}

    /// Allocate a `size`-byte nursery and derive a VMContext from it.
    fn setup_vmctx(size: usize) -> (Nursery, VMContext) {
        let mut nursery = Nursery::new(size);
        let vmctx = nursery.make_vmctx(mock_gc_trigger);
        (nursery, vmctx)
    }

    /// Push `val` through value_to_heap, then read it back with heap_to_value.
    fn roundtrip(val: &Value, ctx: &mut VMContext) -> Value {
        unsafe {
            let obj = value_to_heap(val, ctx).expect("value_to_heap failed");
            heap_to_value(obj).expect("heap_to_value failed")
        }
    }

    #[test]
    fn test_lit_int_roundtrip() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        match roundtrip(&Value::Lit(Literal::LitInt(42)), &mut ctx) {
            Value::Lit(Literal::LitInt(n)) => assert_eq!(n, 42),
            other => panic!("Expected LitInt, got {:?}", other),
        }
    }

    #[test]
    fn test_lit_word_roundtrip() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        match roundtrip(&Value::Lit(Literal::LitWord(123)), &mut ctx) {
            Value::Lit(Literal::LitWord(n)) => assert_eq!(n, 123),
            other => panic!("Expected LitWord, got {:?}", other),
        }
    }

    #[test]
    fn test_lit_char_roundtrip() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        match roundtrip(&Value::Lit(Literal::LitChar('λ')), &mut ctx) {
            Value::Lit(Literal::LitChar(c)) => assert_eq!(c, 'λ'),
            other => panic!("Expected LitChar, got {:?}", other),
        }
    }

    #[test]
    fn test_lit_double_roundtrip() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        let val = Value::Lit(Literal::LitDouble(f64::to_bits(1.2345678)));
        match roundtrip(&val, &mut ctx) {
            Value::Lit(Literal::LitDouble(bits)) => assert_eq!(f64::from_bits(bits), 1.2345678),
            other => panic!("Expected LitDouble, got {:?}", other),
        }
    }

    #[test]
    fn test_lit_string_roundtrip() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        let bytes = b"hello world".to_vec();
        match roundtrip(&Value::Lit(Literal::LitString(bytes.clone())), &mut ctx) {
            Value::Lit(Literal::LitString(b)) => assert_eq!(b, bytes),
            other => panic!("Expected LitString, got {:?}", other),
        }
    }

    #[test]
    fn test_con_no_fields() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        match roundtrip(&Value::Con(DataConId(42), vec![]), &mut ctx) {
            Value::Con(id, fields) => {
                assert_eq!(id.0, 42);
                assert!(fields.is_empty());
            }
            other => panic!("Expected Con, got {:?}", other),
        }
    }

    #[test]
    fn test_con_lit_fields() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        let val = Value::Con(
            DataConId(1),
            vec![
                Value::Lit(Literal::LitInt(10)),
                Value::Lit(Literal::LitChar('a')),
            ],
        );
        match roundtrip(&val, &mut ctx) {
            Value::Con(id, fields) => {
                assert_eq!(id.0, 1);
                assert_eq!(fields.len(), 2);
                match (&fields[0], &fields[1]) {
                    (Value::Lit(Literal::LitInt(10)), Value::Lit(Literal::LitChar('a'))) => (),
                    _ => panic!("Expected [LitInt(10), LitChar('a')], got {:?}", fields),
                }
            }
            other => panic!("Expected Con, got {:?}", other),
        }
    }

    #[test]
    fn test_con_nested() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        // Just (I# 42)
        let payload = Value::Con(DataConId(2), vec![Value::Lit(Literal::LitInt(42))]);
        let wrapped = Value::Con(DataConId(1), vec![payload]);

        let Value::Con(outer_id, outer_fields) = roundtrip(&wrapped, &mut ctx) else {
            panic!("Expected Con");
        };
        assert_eq!(outer_id.0, 1);
        assert_eq!(outer_fields.len(), 1);
        let Value::Con(inner_id, inner_fields) = &outer_fields[0] else {
            panic!("Expected nested Con");
        };
        assert_eq!(inner_id.0, 2);
        assert_eq!(inner_fields.len(), 1);
        let Value::Lit(Literal::LitInt(n)) = &inner_fields[0] else {
            panic!("Expected LitInt");
        };
        assert_eq!(*n, 42);
    }

    #[test]
    fn test_byte_array_roundtrip() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        let data = vec![1, 2, 3, 4, 5];
        let val = Value::ByteArray(Arc::new(Mutex::new(data.clone())));
        match roundtrip(&val, &mut ctx) {
            Value::ByteArray(ba) => assert_eq!(*ba.lock().unwrap(), data),
            other => panic!("Expected ByteArray, got {:?}", other),
        }
    }

    #[test]
    fn test_bump_alloc_alignment() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        unsafe {
            let first = bump_alloc_from_vmctx(&mut ctx, 1);
            let second = bump_alloc_from_vmctx(&mut ctx, 1);
            // One-byte requests must round up to full 8-byte slots.
            assert_eq!(first as usize % 8, 0);
            assert_eq!(second as usize % 8, 0);
            assert_eq!(second as usize - first as usize, 8);
        }
    }

    #[test]
    fn test_bump_alloc_bounds() {
        let (_nursery, mut ctx) = setup_vmctx(16);
        unsafe {
            // A 16-byte nursery fits exactly two 8-byte allocations...
            assert!(!bump_alloc_from_vmctx(&mut ctx, 8).is_null());
            assert!(!bump_alloc_from_vmctx(&mut ctx, 8).is_null());
            // ...after which any further request must fail.
            assert!(bump_alloc_from_vmctx(&mut ctx, 1).is_null());
        }
    }

    #[test]
    fn test_lit_float_roundtrip() {
        let (_nursery, mut ctx) = setup_vmctx(1024);
        let bits = f32::to_bits(1.23f32) as u64;
        match roundtrip(&Value::Lit(Literal::LitFloat(bits)), &mut ctx) {
            Value::Lit(Literal::LitFloat(b)) => assert_eq!(b, bits),
            other => panic!("Expected LitFloat, got {:?}", other),
        }
    }

    #[test]
    fn test_null_pointer_error() {
        let result = unsafe { heap_to_value(std::ptr::null()) };
        assert!(matches!(result, Err(BridgeError::NullPointer)));
    }

    #[test]
    fn test_invalid_heap_tag() {
        let garbage = [0xFFu8; 32];
        let result = unsafe { heap_to_value(garbage.as_ptr()) };
        assert!(matches!(result, Err(BridgeError::UnexpectedHeapTag(0xFF))));
    }
}