Skip to main content

shape_value/
heap_header.rs

1//! Fixed-layout heap object header for JIT-friendly type dispatch.
2//!
3//! `HeapHeader` is a `#[repr(C, align(16))]` struct that prefixes heap-allocated
4//! objects, giving the JIT a stable memory layout to read the object's kind, length,
5//! and capacity without depending on Rust's enum discriminant layout.
6//!
7//! ## Memory layout (32 bytes, 16-byte aligned)
8//!
9//! ```text
10//! Offset  Size  Field
11//! ------  ----  -----
12//!   0       2   kind (HeapKind as u16)
13//!   2       1   elem_type (element type hint for arrays/typed objects)
14//!   3       1   flags (bitfield: MARKED, PINNED, READONLY, etc.)
15//!   4       4   len (element count / field count)
16//!   8       4   cap (allocated capacity, 0 if not applicable)
17//!  12       4   (padding)
18//!  16       8   aux (auxiliary data: schema_id, function_id, etc.)
19//!  24       8   (reserved / future use)
20//! ```
21
22use crate::heap_value::{HeapKind, HeapValue};
23
/// GC mark bit: set on live objects during a mark phase of a collection cycle.
pub const FLAG_MARKED: u8 = 1 << 0;
/// Pin bit: while set, the GC must not relocate this object.
pub const FLAG_PINNED: u8 = 1 << 1;
/// Read-only bit: the object is immutable after construction.
pub const FLAG_READONLY: u8 = 1 << 2;
30
/// Fixed-layout header for heap-allocated objects.
///
/// This struct is designed to be readable by JIT-generated code at known offsets.
/// The JIT can load `kind` at offset 0, `len` at offset 4, and `aux` at offset 16
/// without any Rust ABI knowledge. `repr(C)` pins the declared field order and
/// `align(16)` forces the documented alignment; the module-level table shows the
/// resulting 32-byte layout.
///
/// Note: the derived `PartialEq`/`Eq` also compare `_pad` and `_reserved`. Both
/// constructors zero them, so this only matters for headers produced some other
/// way (e.g. read back from raw memory).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C, align(16))]
pub struct HeapHeader {
    /// Object type discriminator (matches `HeapKind` and `HEAP_KIND_*` constants).
    pub kind: u16,
    /// Element type hint for homogeneous containers (0 = untyped/mixed).
    /// Values mirror the `elem_types` module: 1=f64, 2=i64, 3=string, 4=bool,
    /// 5=typed_object. For typed objects: unused (0).
    /// `from_heap_value` also stores the TaskGroup kind byte here.
    pub elem_type: u8,
    /// Bitfield flags (`FLAG_MARKED`, `FLAG_PINNED`, `FLAG_READONLY`).
    pub flags: u8,
    /// Element count (array length, field count, string byte length, etc.).
    pub len: u32,
    /// Allocated capacity (for growable containers). 0 if not applicable.
    /// `from_heap_value` reuses this slot for `row_idx` (RowView),
    /// `col_id` (ColumnRef), and `index_col` (IndexedTable).
    pub cap: u32,
    /// Explicit padding so `aux` lands at offset 16 (see layout table above).
    _pad: u32,
    /// Auxiliary data interpreted per-kind:
    /// - TypedObject: schema_id (u64)
    /// - Closure: function_id (`from_heap_value` stores the full id; the
    ///   upvalue count goes in `len`, not here)
    /// - TypedTable/RowView/ColumnRef/IndexedTable: schema_id (u64)
    /// - Future: future_id (u64)
    /// - Enum: left 0 by `from_heap_value` (variants are identified by name)
    /// - Other: 0
    pub aux: u64,
    /// Reserved for future use (e.g., GC forwarding pointer).
    _reserved: u64,
}
64
65/// Compile-time size and offset assertions.
66const _: () = {
67    assert!(std::mem::size_of::<HeapHeader>() == 32);
68    assert!(std::mem::align_of::<HeapHeader>() == 16);
69};
70
/// Element type hints stored in `HeapHeader::elem_type`.
pub mod elem_types {
    /// Mixed or unknown element types.
    pub const UNTYPED: u8 = 0;
    /// Every element is an f64.
    pub const F64: u8 = 1;
    /// Every element is an i64.
    pub const I64: u8 = 2;
    /// Every element is a string.
    pub const STRING: u8 = 3;
    /// Every element is a bool.
    pub const BOOL: u8 = 4;
    /// Every element is a typed object.
    pub const TYPED_OBJECT: u8 = 5;
}
86
87impl HeapHeader {
88    /// Create a new HeapHeader with the given kind. All other fields are zeroed.
89    #[inline]
90    pub fn new(kind: HeapKind) -> Self {
91        Self {
92            kind: kind as u16,
93            elem_type: 0,
94            flags: 0,
95            len: 0,
96            cap: 0,
97            _pad: 0,
98            aux: 0,
99            _reserved: 0,
100        }
101    }
102
103    /// Create a HeapHeader with kind, length, and auxiliary data.
104    #[inline]
105    pub fn with_len_aux(kind: HeapKind, len: u32, aux: u64) -> Self {
106        Self {
107            kind: kind as u16,
108            elem_type: 0,
109            flags: 0,
110            len,
111            cap: 0,
112            _pad: 0,
113            aux,
114            _reserved: 0,
115        }
116    }
117
118    /// Build a HeapHeader from an existing HeapValue.
119    ///
120    /// Extracts kind, length, and auxiliary data from the HeapValue's contents.
121    pub fn from_heap_value(value: &HeapValue) -> Self {
122        let kind = value.kind();
123        let mut header = Self::new(kind);
124
125        match value {
126            HeapValue::String(s) => {
127                header.len = s.len() as u32;
128            }
129            HeapValue::Array(arr) => {
130                header.len = arr.len() as u32;
131                header.cap = arr.len() as u32;
132            }
133            HeapValue::TypedObject {
134                schema_id, slots, ..
135            } => {
136                header.len = slots.len() as u32;
137                header.aux = *schema_id;
138            }
139            HeapValue::Closure {
140                function_id,
141                upvalues,
142            } => {
143                header.len = upvalues.len() as u32;
144                header.aux = *function_id as u64;
145            }
146            HeapValue::DataTable(dt) => {
147                header.len = dt.row_count() as u32;
148            }
149            HeapValue::TypedTable { schema_id, table } => {
150                header.len = table.row_count() as u32;
151                header.aux = *schema_id;
152            }
153            HeapValue::RowView {
154                schema_id, row_idx, ..
155            } => {
156                header.len = 1;
157                header.aux = *schema_id;
158                // Store row_idx in the lower 32 bits of _reserved via cap field
159                header.cap = *row_idx as u32;
160            }
161            HeapValue::ColumnRef {
162                schema_id, col_id, ..
163            } => {
164                header.aux = *schema_id;
165                header.cap = *col_id;
166            }
167            HeapValue::IndexedTable {
168                schema_id,
169                table,
170                index_col,
171            } => {
172                header.len = table.row_count() as u32;
173                header.aux = *schema_id;
174                header.cap = *index_col;
175            }
176            HeapValue::Enum(_) => {
177                // Enum variant is identified by name, not index; no numeric aux needed.
178            }
179            HeapValue::Future(id) => {
180                header.aux = *id;
181            }
182            HeapValue::TaskGroup { kind, task_ids } => {
183                header.elem_type = *kind;
184                header.len = task_ids.len() as u32;
185            }
186            // Remaining types: kind is sufficient, no extra metadata needed.
187            _ => {}
188        }
189
190        header
191    }
192
193    /// Get the HeapKind from this header.
194    #[inline]
195    pub fn heap_kind(&self) -> Option<HeapKind> {
196        HeapKind::from_u16(self.kind)
197    }
198
199    /// Check if a flag is set.
200    #[inline]
201    pub fn has_flag(&self, flag: u8) -> bool {
202        self.flags & flag != 0
203    }
204
205    /// Set a flag.
206    #[inline]
207    pub fn set_flag(&mut self, flag: u8) {
208        self.flags |= flag;
209    }
210
211    /// Clear a flag.
212    #[inline]
213    pub fn clear_flag(&mut self, flag: u8) {
214        self.flags &= !flag;
215    }
216
217    /// Byte offset of the `kind` field from the start of the header.
218    pub const OFFSET_KIND: usize = 0;
219    /// Byte offset of the `elem_type` field.
220    pub const OFFSET_ELEM_TYPE: usize = 2;
221    /// Byte offset of the `flags` field.
222    pub const OFFSET_FLAGS: usize = 3;
223    /// Byte offset of the `len` field.
224    pub const OFFSET_LEN: usize = 4;
225    /// Byte offset of the `cap` field.
226    pub const OFFSET_CAP: usize = 8;
227    /// Byte offset of the `aux` field.
228    pub const OFFSET_AUX: usize = 16;
229}
230
231impl HeapKind {
232    /// Convert a u16 discriminant to a HeapKind, returning None if out of range.
233    #[inline]
234    pub fn from_u16(v: u16) -> Option<Self> {
235        if v <= HeapKind::F32Array as u16 {
236            // Safety: HeapKind is repr(u8) with contiguous variants from 0..=max.
237            // We checked the range, and u16 fits in u8 for valid values.
238            Some(unsafe { std::mem::transmute(v as u8) })
239        } else {
240            None
241        }
242    }
243
244    /// Convert a u8 discriminant to a HeapKind, returning None if out of range.
245    #[inline]
246    pub fn from_u8(v: u8) -> Option<Self> {
247        Self::from_u16(v as u16)
248    }
249}
250
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_header_size_and_alignment() {
        // The JIT hard-codes a 32-byte, 16-byte-aligned header; see the
        // module-level layout table.
        assert_eq!(std::mem::size_of::<HeapHeader>(), 32);
        assert_eq!(std::mem::align_of::<HeapHeader>(), 16);
    }

    #[test]
    fn test_header_field_offsets() {
        // Verify the named OFFSET_* constants match the documented layout.
        assert_eq!(HeapHeader::OFFSET_KIND, 0);
        assert_eq!(HeapHeader::OFFSET_ELEM_TYPE, 2);
        assert_eq!(HeapHeader::OFFSET_FLAGS, 3);
        assert_eq!(HeapHeader::OFFSET_LEN, 4);
        assert_eq!(HeapHeader::OFFSET_CAP, 8);
        assert_eq!(HeapHeader::OFFSET_AUX, 16);

        // Cross-check each constant against the actual field address,
        // computed by pointer arithmetic relative to the struct base.
        let h = HeapHeader::new(HeapKind::String);
        let base = &h as *const _ as usize;
        assert_eq!(&h.kind as *const _ as usize - base, HeapHeader::OFFSET_KIND);
        assert_eq!(
            &h.elem_type as *const _ as usize - base,
            HeapHeader::OFFSET_ELEM_TYPE
        );
        assert_eq!(
            &h.flags as *const _ as usize - base,
            HeapHeader::OFFSET_FLAGS
        );
        assert_eq!(&h.len as *const _ as usize - base, HeapHeader::OFFSET_LEN);
        assert_eq!(&h.cap as *const _ as usize - base, HeapHeader::OFFSET_CAP);
        assert_eq!(&h.aux as *const _ as usize - base, HeapHeader::OFFSET_AUX);
    }

    #[test]
    fn test_new_header() {
        // `new` must zero every field except `kind`.
        let h = HeapHeader::new(HeapKind::Array);
        assert_eq!(h.kind, HeapKind::Array as u16);
        assert_eq!(h.elem_type, 0);
        assert_eq!(h.flags, 0);
        assert_eq!(h.len, 0);
        assert_eq!(h.cap, 0);
        assert_eq!(h.aux, 0);
    }

    #[test]
    fn test_with_len_aux() {
        let h = HeapHeader::with_len_aux(HeapKind::TypedObject, 5, 0xDEAD_BEEF);
        assert_eq!(h.kind, HeapKind::TypedObject as u16);
        assert_eq!(h.len, 5);
        assert_eq!(h.aux, 0xDEAD_BEEF);
    }

    #[test]
    fn test_heap_kind_roundtrip() {
        // Relies on the first discriminants being String=0, Array=1,
        // TypedObject=2, and F32Array being the last variant.
        assert_eq!(HeapKind::from_u16(0), Some(HeapKind::String));
        assert_eq!(HeapKind::from_u16(1), Some(HeapKind::Array));
        assert_eq!(HeapKind::from_u16(2), Some(HeapKind::TypedObject));
        assert_eq!(
            HeapKind::from_u16(HeapKind::F32Array as u16),
            Some(HeapKind::F32Array)
        );
        assert_eq!(HeapKind::from_u16(255), None);
    }

    #[test]
    fn test_heap_kind_from_u8() {
        assert_eq!(HeapKind::from_u8(0), Some(HeapKind::String));
        assert_eq!(
            HeapKind::from_u8(HeapKind::F32Array as u8),
            Some(HeapKind::F32Array)
        );
        assert_eq!(HeapKind::from_u8(200), None);
    }

    #[test]
    fn test_flags() {
        // Set/clear must affect only the targeted bit.
        let mut h = HeapHeader::new(HeapKind::Array);
        assert!(!h.has_flag(FLAG_MARKED));
        assert!(!h.has_flag(FLAG_PINNED));

        h.set_flag(FLAG_MARKED);
        assert!(h.has_flag(FLAG_MARKED));
        assert!(!h.has_flag(FLAG_PINNED));

        h.set_flag(FLAG_PINNED);
        assert!(h.has_flag(FLAG_MARKED));
        assert!(h.has_flag(FLAG_PINNED));

        h.clear_flag(FLAG_MARKED);
        assert!(!h.has_flag(FLAG_MARKED));
        assert!(h.has_flag(FLAG_PINNED));
    }

    #[test]
    fn test_from_heap_value_string() {
        // String headers record the byte length in `len`.
        let hv = HeapValue::String(std::sync::Arc::new("hello".to_string()));
        let h = HeapHeader::from_heap_value(&hv);
        assert_eq!(h.kind, HeapKind::String as u16);
        assert_eq!(h.len, 5);
    }

    #[test]
    fn test_from_heap_value_typed_object() {
        // Typed objects record slot count in `len` and schema_id in `aux`.
        let hv = HeapValue::TypedObject {
            schema_id: 42,
            slots: vec![crate::slot::ValueSlot::from_number(0.0); 3].into_boxed_slice(),
            heap_mask: 0,
        };
        let h = HeapHeader::from_heap_value(&hv);
        assert_eq!(h.kind, HeapKind::TypedObject as u16);
        assert_eq!(h.len, 3);
        assert_eq!(h.aux, 42);
    }

    #[test]
    fn test_from_heap_value_closure() {
        // Closures record upvalue count in `len` and function_id in `aux`.
        let hv = HeapValue::Closure {
            function_id: 7,
            upvalues: vec![],
        };
        let h = HeapHeader::from_heap_value(&hv);
        assert_eq!(h.kind, HeapKind::Closure as u16);
        assert_eq!(h.len, 0);
        assert_eq!(h.aux, 7);
    }
}