//! tidepool_codegen/heap_bridge.rs
//!
//! Conversions between JIT nursery heap objects and interpreter `Value`s.
1use crate::context::VMContext;
2use tidepool_eval::value::Value;
3use tidepool_heap::layout;
4use tidepool_repr::{DataConId, Literal};
5use std::fmt;
6
/// Errors produced while converting between JIT heap objects and `Value`s.
//
// Derives beyond `Debug` are additive and backward-compatible: all payloads
// are `u8`, so the enum is trivially `Copy`/`Eq`, which lets callers and
// tests compare errors directly.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BridgeError {
    /// The object header byte matched neither `TAG_LIT` nor `TAG_CON`.
    UnexpectedHeapTag(u8),
    /// A literal cell carried an unknown literal discriminant.
    UnexpectedLitTag(u8),
    /// A heap pointer (object or string payload) was null.
    NullPointer,
}
13
14impl fmt::Display for BridgeError {
15    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
16        match self {
17            BridgeError::UnexpectedHeapTag(t) => write!(f, "unexpected heap tag: {}", t),
18            BridgeError::UnexpectedLitTag(t) => write!(f, "unexpected lit tag: {}", t),
19            BridgeError::NullPointer => write!(f, "null pointer"),
20        }
21    }
22}
23
24impl std::error::Error for BridgeError {}
25
/// Convert a heap-allocated object to a Value.
///
/// Recursively walks the object graph: literal cells are decoded from their
/// tag/value slots, constructor cells decode a tag and field count and then
/// convert each field pointer in turn.
///
/// # Errors
///
/// Returns `BridgeError` for a null pointer or an unrecognized heap/literal tag.
///
/// # Safety
///
/// `ptr` must point to a valid HeapObject allocated by the JIT nursery.
pub unsafe fn heap_to_value(ptr: *const u8) -> Result<Value, BridgeError> {
    if ptr.is_null() {
        return Err(BridgeError::NullPointer);
    }

    // The first byte of every heap object is its kind tag.
    let tag = *ptr;
    match tag {
        t if t == layout::TAG_LIT => {
            let lit_tag = *ptr.add(layout::LIT_TAG_OFFSET) as i64;
            // NOTE(review): this aligned i64 read assumes the value slot is
            // 8-byte aligned — consistent with the 8-byte rounding done by
            // bump_alloc_from_vmctx, but confirm LIT_VALUE_OFFSET is a
            // multiple of 8.
            let raw_value = *(ptr.add(layout::LIT_VALUE_OFFSET) as *const i64);

            // Discriminants 0..=5 mirror the values written by value_to_heap.
            match lit_tag {
                0 => Ok(Value::Lit(Literal::LitInt(raw_value))),
                1 => Ok(Value::Lit(Literal::LitWord(raw_value as u64))),
                // Invalid code points decode to NUL rather than erroring.
                2 => Ok(Value::Lit(Literal::LitChar(
                    char::from_u32(raw_value as u32).unwrap_or('\0'),
                ))),
                // Float/double payloads travel as raw bit patterns.
                3 => Ok(Value::Lit(Literal::LitFloat(raw_value as u64))),
                4 => Ok(Value::Lit(Literal::LitDouble(raw_value as u64))),
                5 => {
                    // LitString: value is pointer to [len: u64][bytes...]
                    // Use read_unaligned because JIT data sections may not be 8-byte aligned
                    let data_ptr = raw_value as *const u8;
                    if data_ptr.is_null() {
                        return Err(BridgeError::NullPointer);
                    }
                    let len = std::ptr::read_unaligned(data_ptr as *const u64) as usize;
                    let bytes_ptr = data_ptr.add(8);
                    // Copy the bytes out so the Value owns them independently
                    // of the nursery's lifetime.
                    let bytes = std::slice::from_raw_parts(bytes_ptr, len).to_vec();
                    Ok(Value::Lit(Literal::LitString(bytes)))
                }
                other => Err(BridgeError::UnexpectedLitTag(other as u8)),
            }
        }
        t if t == layout::TAG_CON => {
            let con_tag = *(ptr.add(layout::CON_TAG_OFFSET) as *const u64);
            let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16) as usize;
            let mut fields = Vec::with_capacity(num_fields);
            for i in 0..num_fields {
                // Fields are an inline array of 8-byte pointers starting at
                // CON_FIELDS_OFFSET.
                let field_ptr =
                    *(ptr.add(layout::CON_FIELDS_OFFSET + 8 * i) as *const *const u8);
                // NOTE(review): recursion depth follows data depth — confirm
                // the caller bounds nesting, or deep structures can overflow
                // the stack.
                fields.push(heap_to_value(field_ptr)?);
            }
            Ok(Value::Con(DataConId(con_tag), fields))
        }
        other => Err(BridgeError::UnexpectedHeapTag(other)),
    }
}
79
80/// Convert a Value to a heap-allocated object via VMContext bump allocation.
81///
82/// # Safety
83///
84/// `vmctx` must point to a valid VMContext with sufficient nursery space.
85pub unsafe fn value_to_heap(val: &Value, vmctx: &mut VMContext) -> Result<*mut u8, BridgeError> {
86    match val {
87        Value::Lit(lit) => {
88            let ptr = bump_alloc_from_vmctx(vmctx, layout::LIT_SIZE);
89            layout::write_header(ptr, layout::TAG_LIT, layout::LIT_SIZE as u16);
90
91            match lit {
92                Literal::LitInt(n) => {
93                    *ptr.add(layout::LIT_TAG_OFFSET) = 0;
94                    *(ptr.add(layout::LIT_VALUE_OFFSET) as *mut i64) = *n;
95                }
96                Literal::LitWord(n) => {
97                    *ptr.add(layout::LIT_TAG_OFFSET) = 1;
98                    *(ptr.add(layout::LIT_VALUE_OFFSET) as *mut i64) = *n as i64;
99                }
100                Literal::LitChar(c) => {
101                    *ptr.add(layout::LIT_TAG_OFFSET) = 2;
102                    *(ptr.add(layout::LIT_VALUE_OFFSET) as *mut i64) = *c as i64;
103                }
104                Literal::LitFloat(bits) => {
105                    *ptr.add(layout::LIT_TAG_OFFSET) = 3;
106                    *(ptr.add(layout::LIT_VALUE_OFFSET) as *mut i64) = *bits as i64;
107                }
108                Literal::LitDouble(bits) => {
109                    *ptr.add(layout::LIT_TAG_OFFSET) = 4;
110                    *(ptr.add(layout::LIT_VALUE_OFFSET) as *mut i64) = *bits as i64;
111                }
112                Literal::LitString(bytes) => {
113                    // Allocate string data: [len: u64][bytes...]
114                    let data_size = 8 + bytes.len();
115                    let data_ptr = bump_alloc_from_vmctx(vmctx, data_size);
116                    *(data_ptr as *mut u64) = bytes.len() as u64;
117                    std::ptr::copy_nonoverlapping(
118                        bytes.as_ptr(),
119                        data_ptr.add(8),
120                        bytes.len(),
121                    );
122                    *ptr.add(layout::LIT_TAG_OFFSET) = 5;
123                    *(ptr.add(layout::LIT_VALUE_OFFSET) as *mut i64) = data_ptr as i64;
124                }
125            }
126            Ok(ptr)
127        }
128        Value::Con(id, fields) => {
129            // Recursively convert fields first
130            let mut field_ptrs = Vec::with_capacity(fields.len());
131            for f in fields {
132                field_ptrs.push(value_to_heap(f, vmctx)?);
133            }
134
135            let size = 24 + 8 * fields.len();
136            let ptr = bump_alloc_from_vmctx(vmctx, size);
137            layout::write_header(ptr, layout::TAG_CON, size as u16);
138
139            *(ptr.add(layout::CON_TAG_OFFSET) as *mut u64) = id.0;
140            *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *mut u16) = fields.len() as u16;
141
142            for (i, fp) in field_ptrs.into_iter().enumerate() {
143                *(ptr.add(layout::CON_FIELDS_OFFSET + 8 * i) as *mut *mut u8) = fp;
144            }
145            Ok(ptr)
146        }
147        _ => Err(BridgeError::UnexpectedHeapTag(255)),
148    }
149}
150
151/// Bump-allocate from VMContext. Panics if nursery is exhausted.
152///
153/// # Safety
154///
155/// `vmctx` must point to a valid VMContext with a live nursery.
156pub unsafe fn bump_alloc_from_vmctx(vmctx: &mut VMContext, size: usize) -> *mut u8 {
157    // Align to 8 bytes
158    let aligned_size = (size + 7) & !7;
159    let ptr = vmctx.alloc_ptr;
160    let new_ptr = ptr.add(aligned_size);
161    if new_ptr as *const u8 > vmctx.alloc_limit {
162        panic!(
163            "nursery exhausted: tried to allocate {} bytes, {} remaining",
164            aligned_size,
165            vmctx.alloc_limit as usize - ptr as usize
166        );
167    }
168    vmctx.alloc_ptr = new_ptr;
169    ptr
170}