//! shape_vm/executor/vm_impl/stack.rs

1use super::super::*;
2
3impl VirtualMachine {
4    pub fn create_typed_enum(
5        &self,
6        enum_name: &str,
7        variant_name: &str,
8        payload: Vec<ValueWord>,
9    ) -> Option<ValueWord> {
10        let nb_payload: Vec<ValueWord> = payload.into_iter().map(|v| v).collect();
11        self.create_typed_enum_nb(enum_name, variant_name, nb_payload)
12            .map(|nb| nb.clone())
13    }
14
    /// Create a TypedObject enum value using ValueWord payload directly.
    ///
    /// Returns `None` when `enum_name` is not in the type-schema registry,
    /// the schema carries no enum info, or `variant_name` is unknown.
    ///
    /// Slot layout: slot 0 holds the variant discriminator as an i64; slots
    /// 1.. hold the payload, padded with None up to the enum's maximum
    /// payload width so every variant shares one slot count.
    pub fn create_typed_enum_nb(
        &self,
        enum_name: &str,
        variant_name: &str,
        payload: Vec<ValueWord>,
    ) -> Option<ValueWord> {
        let schema = self.program.type_schema_registry.get(enum_name)?;
        let enum_info = schema.get_enum_info()?;
        let variant_id = enum_info.variant_id(variant_name)?;

        // Build slots: slot 0 = variant_id, slot 1+ = payload
        let slot_count = 1 + enum_info.max_payload_fields() as usize;
        let mut slots = Vec::with_capacity(slot_count);
        // One bit per slot marking heap-backed entries (bit i == slot i).
        // NOTE(review): assumes slot_count <= 64 — the shift below would be
        // out of range for a wider enum; confirm max_payload_fields is bounded.
        let mut heap_mask: u64 = 0;

        // Slot 0: variant discriminator is an i64 field (`__variant`).
        slots.push(ValueSlot::from_int(variant_id as i64));

        // Payload slots: unpack each NaN-boxed word into the slot form its
        // tag calls for.
        for (i, nb) in payload.into_iter().enumerate() {
            let slot_idx = 1 + i;
            match nb.tag() {
                shape_value::NanTag::F64 => {
                    slots.push(ValueSlot::from_number(nb.as_f64().unwrap_or(0.0)))
                }
                shape_value::NanTag::I48 => {
                    // Integers are widened into f64 number slots.
                    slots.push(ValueSlot::from_number(nb.as_i64().unwrap_or(0) as f64))
                }
                shape_value::NanTag::Bool => {
                    slots.push(ValueSlot::from_bool(nb.as_bool().unwrap_or(false)))
                }
                shape_value::NanTag::None => slots.push(ValueSlot::none()),
                _ => {
                    if let Some(hv) = nb.as_heap_ref() {
                        // Heap-backed payload: clone the ref into the slot and
                        // flag it in heap_mask — presumably so downstream code
                        // can locate heap refs; verify against the consumers.
                        slots.push(ValueSlot::from_heap(hv.clone()));
                        heap_mask |= 1u64 << slot_idx;
                    } else {
                        // Function/ModuleFunction/Unit/other inline types: store as int slot
                        let id = nb
                            .as_function()
                            .or_else(|| nb.as_module_function().map(|u| u as u16))
                            .unwrap_or(0);
                        slots.push(ValueSlot::from_int(id as i64));
                    }
                }
            }
        }

        // Fill remaining payload slots with None
        while slots.len() < slot_count {
            slots.push(ValueSlot::none());
        }

        Some(ValueWord::from_heap_value(HeapValue::TypedObject {
            schema_id: schema.id as u64,
            slots: slots.into_boxed_slice(),
            heap_mask,
        }))
    }
75
76    // --- ValueWord-direct stack ops for hot paths ---
77
78    /// Push a ValueWord value directly (no ValueWord conversion).
79    ///
80    /// Hot path: single bounds check + write.  The stack growth and overflow
81    /// checks are split into a cold `push_vw_slow` to keep the hot path tight.
82    #[inline(always)]
83    pub(crate) fn push_vw(&mut self, value: ValueWord) -> Result<(), VMError> {
84        if self.sp >= self.stack.len() {
85            return self.push_vw_slow(value);
86        }
87        self.stack[self.sp] = value;
88        self.sp += 1;
89        Ok(())
90    }
91
92    /// Cold path for push_vw: grow the stack or return StackOverflow.
93    #[cold]
94    #[inline(never)]
95    pub(crate) fn push_vw_slow(&mut self, value: ValueWord) -> Result<(), VMError> {
96        if self.sp >= self.config.max_stack_size {
97            return Err(VMError::StackOverflow);
98        }
99        let new_len = self.sp * 2 + 1;
100        self.stack.reserve(new_len - self.stack.len());
101        while self.stack.len() < new_len {
102            self.stack.push(ValueWord::none());
103        }
104        self.stack[self.sp] = value;
105        self.sp += 1;
106        Ok(())
107    }
108
109    /// Pop a ValueWord value directly (no ValueWord conversion).
110    ///
111    /// Uses `ptr::read` to take ownership of the value, then writes a
112    /// ValueWord::none() sentinel via raw pointer to prevent double-free on
113    /// Vec drop — avoiding bounds checks and the full `mem::replace` protocol.
114    ///
115    /// The underflow check is retained for safety but marked cold so the
116    /// branch predictor always predicts the fast path (sp > 0).
117    #[inline(always)]
118    pub(crate) fn pop_vw(&mut self) -> Result<ValueWord, VMError> {
119        if self.sp == 0 {
120            return Self::pop_vw_underflow();
121        }
122        self.sp -= 1;
123        // SAFETY: sp was > 0 before decrement, so self.sp is a valid index
124        // into self.stack (which is pre-allocated to at least DEFAULT_STACK_CAPACITY).
125        // We take ownership via ptr::read and immediately overwrite the slot with
126        // a None sentinel so the Vec destructor won't double-free any heap ValueWord.
127        unsafe {
128            let ptr = self.stack.as_mut_ptr().add(self.sp);
129            let val = std::ptr::read(ptr);
130            // Write ValueWord::none() bit pattern directly. This is TAG_BASE | (TAG_NONE << 48)
131            // = 0xFFFB_0000_0000_0000. It's a non-heap tagged value so Drop is a no-op.
132            std::ptr::write(ptr as *mut u64, 0xFFFB_0000_0000_0000u64);
133            Ok(val)
134        }
135    }
136
137    #[cold]
138    #[inline(never)]
139    pub(crate) fn pop_vw_underflow() -> Result<ValueWord, VMError> {
140        Err(VMError::StackUnderflow)
141    }
142
143    /// Pop and materialize a ValueWord from the stack (convenience for tests and legacy callers).
144    pub fn pop(&mut self) -> Result<ValueWord, VMError> {
145        Ok(self.pop_vw()?.clone())
146    }
147
148    // ===== Hash and frame helpers =====
149
150    pub(crate) fn blob_hash_for_function(&self, func_id: u16) -> Option<FunctionHash> {
151        self.function_hashes
152            .get(func_id as usize)
153            .copied()
154            .flatten()
155    }
156
157    #[inline]
158    pub(crate) fn function_id_for_blob_hash(&self, hash: FunctionHash) -> Option<u16> {
159        self.function_id_by_hash.get(&hash).copied()
160    }
161
162    pub(crate) fn current_locals_base(&self) -> usize {
163        self.call_stack
164            .last()
165            .map(|frame| frame.base_pointer)
166            .unwrap_or(0)
167    }
168}