// shape_vm/executor/vm_impl/stack.rs
use super::super::*;
3impl VirtualMachine {
4 pub fn create_typed_enum(
5 &self,
6 enum_name: &str,
7 variant_name: &str,
8 payload: Vec<ValueWord>,
9 ) -> Option<ValueWord> {
10 let nb_payload: Vec<ValueWord> = payload.into_iter().map(|v| v).collect();
11 self.create_typed_enum_nb(enum_name, variant_name, nb_payload)
12 .map(|nb| nb.clone())
13 }
14
    /// Constructs a `TypedObject` heap value for `enum_name::variant_name`,
    /// unboxing the NaN-boxed payload words into fixed slots.
    ///
    /// Slot layout: slot 0 holds the variant id; slots `1..=max_payload_fields`
    /// hold the payload, padded with `none` slots so every variant of the enum
    /// occupies the same slot count. `heap_mask` has bit `i` set exactly when
    /// slot `i` stores a heap reference (slot 0 never does).
    ///
    /// Returns `None` if the enum schema, its enum info, or the variant name
    /// cannot be resolved.
    pub fn create_typed_enum_nb(
        &self,
        enum_name: &str,
        variant_name: &str,
        payload: Vec<ValueWord>,
    ) -> Option<ValueWord> {
        let schema = self.program.type_schema_registry.get(enum_name)?;
        let enum_info = schema.get_enum_info()?;
        let variant_id = enum_info.variant_id(variant_name)?;

        // One slot for the discriminant plus room for the widest variant.
        let slot_count = 1 + enum_info.max_payload_fields() as usize;
        let mut slots = Vec::with_capacity(slot_count);
        let mut heap_mask: u64 = 0;

        // Slot 0: variant discriminant.
        slots.push(ValueSlot::from_int(variant_id as i64));

        // Translate each NaN-boxed payload word into its slot representation.
        for (i, nb) in payload.into_iter().enumerate() {
            let slot_idx = 1 + i;
            match nb.tag() {
                shape_value::NanTag::F64 => {
                    slots.push(ValueSlot::from_number(nb.as_f64().unwrap_or(0.0)))
                }
                shape_value::NanTag::I48 => {
                    // Integers are widened to f64 in the slot encoding.
                    slots.push(ValueSlot::from_number(nb.as_i64().unwrap_or(0) as f64))
                }
                shape_value::NanTag::Bool => {
                    slots.push(ValueSlot::from_bool(nb.as_bool().unwrap_or(false)))
                }
                shape_value::NanTag::None => slots.push(ValueSlot::none()),
                _ => {
                    if let Some(hv) = nb.as_heap_ref() {
                        // Heap-backed value: record it in the mask so later
                        // consumers of `heap_mask` can locate the reference.
                        slots.push(ValueSlot::from_heap(hv.clone()));
                        // NOTE(review): assumes slot_idx < 64, i.e. payloads
                        // never exceed 63 fields — confirm against the schema's
                        // max_payload_fields limit.
                        heap_mask |= 1u64 << slot_idx;
                    } else {
                        // Remaining tags are function-like: store the id as an
                        // integer slot (0 when neither accessor matches).
                        let id = nb
                            .as_function()
                            .or_else(|| nb.as_module_function().map(|u| u as u16))
                            .unwrap_or(0);
                        slots.push(ValueSlot::from_int(id as i64));
                    }
                }
            }
        }

        // Pad short variants so all instances share the schema's slot count.
        while slots.len() < slot_count {
            slots.push(ValueSlot::none());
        }

        Some(ValueWord::from_heap_value(HeapValue::TypedObject {
            schema_id: schema.id as u64,
            slots: slots.into_boxed_slice(),
            heap_mask,
        }))
    }
75
76 #[inline(always)]
83 pub(crate) fn push_vw(&mut self, value: ValueWord) -> Result<(), VMError> {
84 if self.sp >= self.stack.len() {
85 return self.push_vw_slow(value);
86 }
87 self.stack[self.sp] = value;
88 self.sp += 1;
89 Ok(())
90 }
91
92 #[cold]
94 #[inline(never)]
95 pub(crate) fn push_vw_slow(&mut self, value: ValueWord) -> Result<(), VMError> {
96 if self.sp >= self.config.max_stack_size {
97 return Err(VMError::StackOverflow);
98 }
99 let new_len = self.sp * 2 + 1;
100 self.stack.reserve(new_len - self.stack.len());
101 while self.stack.len() < new_len {
102 self.stack.push(ValueWord::none());
103 }
104 self.stack[self.sp] = value;
105 self.sp += 1;
106 Ok(())
107 }
108
    /// Pops and returns the top value of the operand stack.
    ///
    /// The vacated slot is overwritten with a poison bit pattern so a stale
    /// value cannot be observed (or kept alive) after the pop. Fails with
    /// `VMError::StackUnderflow` on an empty stack.
    #[inline(always)]
    pub(crate) fn pop_vw(&mut self) -> Result<ValueWord, VMError> {
        if self.sp == 0 {
            return Self::pop_vw_underflow();
        }
        self.sp -= 1;
        unsafe {
            // SAFETY: sp was > 0 and push_vw/push_vw_slow only advance sp
            // over slots they initialized, so `sp` now indexes a live,
            // in-bounds element of `stack`.
            let ptr = self.stack.as_mut_ptr().add(self.sp);
            // Move the value out without running Drop on the slot.
            let val = std::ptr::read(ptr);
            // Overwrite the slot's raw bits with an inert poison pattern.
            // NOTE(review): this punning assumes ValueWord is a transparent
            // u64 NaN-box and that 0xFFFB_0000_0000_0000 is a droppable
            // "none"-style tag — confirm against shape_value's encoding.
            std::ptr::write(ptr as *mut u64, 0xFFFB_0000_0000_0000u64);
            Ok(val)
        }
    }
136
    /// Cold, out-of-line underflow path for `pop_vw`, kept separate so the
    /// hot pop path stays small enough to inline.
    #[cold]
    #[inline(never)]
    pub(crate) fn pop_vw_underflow() -> Result<ValueWord, VMError> {
        Err(VMError::StackUnderflow)
    }
142
143 pub fn pop(&mut self) -> Result<ValueWord, VMError> {
145 Ok(self.pop_vw()?.clone())
146 }
147
148 pub(crate) fn blob_hash_for_function(&self, func_id: u16) -> Option<FunctionHash> {
151 self.function_hashes
152 .get(func_id as usize)
153 .copied()
154 .flatten()
155 }
156
    /// Reverse lookup: resolves a function's blob hash back to its id.
    #[inline]
    pub(crate) fn function_id_for_blob_hash(&self, hash: FunctionHash) -> Option<u16> {
        self.function_id_by_hash.get(&hash).copied()
    }
161
162 pub(crate) fn current_locals_base(&self) -> usize {
163 self.call_stack
164 .last()
165 .map(|frame| frame.base_pointer)
166 .unwrap_or(0)
167 }
168}