// tidepool_codegen/effect_machine.rs
use crate::context::VMContext;
use crate::heap_bridge;
use crate::yield_type::{Yield, YieldError};
use tidepool_heap::layout;

/// Numeric data-constructor tags the machine must recognize when decoding
/// heap objects produced by compiled code.
///
/// The values are resolved by name from the program's constructor table at
/// startup (see `ConTags::from_table`), so the decoder does not hard-code
/// tag numbers.
#[derive(Debug, Clone, Copy)]
pub struct ConTags {
    /// Tag of the `Val` constructor: a finished result (field 0 = value).
    pub val: u64,
    /// Tag of the `E` constructor: a pending effect (fields: union, continuation).
    pub e: u64,
    /// Tag of the `Union` constructor: the effect payload (fields: tag literal, request).
    pub union: u64,
    /// Tag of the `Leaf` continuation node: wraps a single closure.
    pub leaf: u64,
    /// Tag of the `Node` continuation node: sequential composition of two continuations.
    pub node: u64,
}
24
25impl ConTags {
26 pub fn from_table(table: &tidepool_repr::DataConTable) -> Option<Self> {
28 Some(ConTags {
29 val: table.get_by_name("Val")?.0,
30 e: table.get_by_name("E")?.0,
31 union: table.get_by_name("Union")?.0,
32 leaf: table.get_by_name("Leaf")?.0,
33 node: table.get_by_name("Node")?.0,
34 })
35 }
36}
37
/// A compiled effect program bundled with everything needed to run and
/// decode it: the compiled entry point, the VM context it executes against,
/// and the constructor tags used to interpret the heap values it returns.
pub struct CompiledEffectMachine {
    /// Compiled entry point. Called with a pointer to `vmctx`; returns a raw
    /// heap pointer encoding either a finished value or an effect request
    /// (decoded by `parse_result`).
    func_ptr: unsafe extern "C" fn(*mut VMContext) -> *mut u8,
    /// Mutable VM state handed to all compiled code; also owns the bump
    /// allocator used by `alloc_con` (via `heap_bridge`).
    vmctx: VMContext,
    /// Constructor tags needed to decode results (see `ConTags`).
    tags: ConTags,
}
51
// SAFETY(review): the struct holds raw pointers (the compiled fn pointer and,
// transitively, heap pointers inside `VMContext`), so `Send` is not derived
// automatically. This asserts the machine may be moved to another thread;
// that is sound only if no other thread aliases this machine's heap/context
// while it runs — confirm at the call sites that spawn threads.
unsafe impl Send for CompiledEffectMachine {}
54
impl CompiledEffectMachine {
    /// Bundle a compiled entry point with its VM context and tag table.
    /// No validation is performed here; the caller vouches that `func_ptr`
    /// was compiled against this `vmctx`'s layout.
    pub fn new(
        func_ptr: unsafe extern "C" fn(*mut VMContext) -> *mut u8,
        vmctx: VMContext,
        tags: ConTags,
    ) -> Self {
        Self {
            func_ptr,
            vmctx,
            tags,
        }
    }

    /// Mutable access to the VM context (e.g. for host code that needs to
    /// touch the heap between steps).
    pub fn vmctx_mut(&mut self) -> &mut VMContext {
        &mut self.vmctx
    }

    /// Run the compiled program from its entry point and decode the result.
    pub fn step(&mut self) -> Yield {
        // SAFETY: `func_ptr` is trusted to be valid compiled code taking
        // `*mut VMContext` — an invariant established at construction.
        let result: *mut u8 = unsafe { (self.func_ptr)(&mut self.vmctx) };
        self.parse_result(result)
    }

    /// Feed `response` into a previously yielded `continuation` and decode
    /// what the program does next.
    ///
    /// # Safety
    /// `continuation` must be a continuation pointer previously yielded by
    /// this machine (a Leaf/Node tree or a bare closure) that is still live
    /// on this machine's heap, and `response` must be a valid heap value.
    pub unsafe fn resume(&mut self, continuation: *mut u8, response: *mut u8) -> Yield {
        let result = self.apply_cont_heap(continuation, response);
        self.parse_result(result)
    }

    /// Decode a raw heap pointer returned by compiled code into a `Yield`.
    ///
    /// Expected shapes (both `TAG_CON` objects):
    /// - `Val(value, ..)`            -> `Yield::Done(value)`
    /// - `E(Union(tag, req), cont)`  -> `Yield::Request { tag, request, continuation }`
    /// Anything else becomes a `Yield::Error`.
    fn parse_result(&self, result: *mut u8) -> Yield {
        if result.is_null() {
            // Compiled code signals trapped arithmetic by returning null after
            // recording the error via the host-function error slot.
            if let Some(err) = crate::host_fns::take_runtime_error() {
                return Yield::Error(match err {
                    crate::host_fns::RuntimeError::DivisionByZero => YieldError::DivisionByZero,
                    crate::host_fns::RuntimeError::Overflow => YieldError::Overflow,
                });
            }
            // Null with no recorded error: report it as-is.
            return Yield::Error(YieldError::NullPointer);
        }

        // The first byte of every heap object is its layout tag; results must
        // be data constructors.
        let tag = unsafe { *result };
        if tag != layout::TAG_CON {
            return Yield::Error(YieldError::UnexpectedTag(tag));
        }

        let con_tag = unsafe { *(result.add(layout::CON_TAG_OFFSET) as *const u64) };

        if con_tag == self.tags.val {
            // Val: field 0 holds the final value. Only the first field is
            // read, so extra fields are tolerated (hence `< 1`, not `!= 1`).
            let num_fields = unsafe { *(result.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16) };
            if num_fields < 1 {
                return Yield::Error(YieldError::BadValFields(num_fields));
            }
            let value = unsafe { *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8) };
            Yield::Done(value)
        } else if con_tag == self.tags.e {
            // E: field 0 is the `Union` describing the effect, field 1 is the
            // continuation to resume with once the host has handled it.
            let num_fields = unsafe { *(result.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16) };
            if num_fields != 2 {
                return Yield::Error(YieldError::BadEFields(num_fields));
            }
            let union_ptr = unsafe { *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8) };
            let continuation =
                unsafe { *(result.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8) };

            if union_ptr.is_null() {
                return Yield::Error(YieldError::NullPointer);
            }

            // NOTE(review): `union_ptr`'s layout tag is not checked to be
            // TAG_CON before its field count is read — presumably guaranteed
            // by codegen; confirm.
            let union_num_fields =
                unsafe { *(union_ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16) };
            if union_num_fields != 2 {
                return Yield::Error(YieldError::BadUnionFields(union_num_fields));
            }

            // Union field 0: a literal object whose payload is the numeric
            // effect tag.
            let tag_ptr =
                unsafe { *(union_ptr.add(layout::CON_FIELDS_OFFSET) as *const *mut u8) };
            if tag_ptr.is_null() {
                return Yield::Error(YieldError::NullPointer);
            }
            let effect_tag = unsafe { *(tag_ptr.add(layout::LIT_VALUE_OFFSET) as *const u64) };
            // Union field 1: the request payload handed to the host handler.
            let request =
                unsafe { *(union_ptr.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8) };

            Yield::Request {
                tag: effect_tag,
                request,
                continuation,
            }
        } else {
            Yield::Error(YieldError::UnexpectedConTag(con_tag))
        }
    }

    /// Apply a continuation `k` to `arg`, returning the raw result object
    /// (null on any structural error).
    ///
    /// A continuation is either `Leaf(f)` (wraps one closure), `Node(k1, k2)`
    /// (sequential composition), or a bare closure object. `Node` implements
    /// the bind of the effect tree: run `k1` on `arg`; if it finishes with
    /// `Val(y)`, continue with `k2(y)`; if it suspends with `E(u, k')`,
    /// re-suspend as `E(u, Node(k', k2))` so `k2` still runs after the
    /// effect completes.
    ///
    /// NOTE(review): recursion depth follows the left spine of the `Node`
    /// tree — a deeply left-nested continuation could overflow the Rust
    /// stack; confirm codegen keeps trees shallow/right-leaning.
    unsafe fn apply_cont_heap(&mut self, k: *mut u8, arg: *mut u8) -> *mut u8 {
        if k.is_null() {
            return std::ptr::null_mut();
        }

        let tag = *k;
        match tag {
            t if t == layout::TAG_CON => {
                let con_tag = *(k.add(layout::CON_TAG_OFFSET) as *const u64);

                if con_tag == self.tags.leaf {
                    // Leaf(f): apply the wrapped closure directly.
                    // NOTE(review): `f` is not null-checked before
                    // `call_closure` dereferences it — confirm codegen never
                    // emits a null Leaf payload.
                    let f = *(k.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                    self.call_closure(f, arg)
                } else if con_tag == self.tags.node {
                    // Node(k1, k2): run k1 first, then dispatch on its result.
                    let k1 = *(k.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                    let k2 = *(k.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8);

                    let result = self.apply_cont_heap(k1, arg);
                    if result.is_null() {
                        // Propagate failure without touching k2.
                        return std::ptr::null_mut();
                    }

                    let result_tag = *result;
                    if result_tag != layout::TAG_CON {
                        return std::ptr::null_mut();
                    }

                    let result_con_tag =
                        *(result.add(layout::CON_TAG_OFFSET) as *const u64);

                    if result_con_tag == self.tags.val {
                        // k1 finished: feed its value into k2.
                        let y = *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                        self.apply_cont_heap(k2, y)
                    } else if result_con_tag == self.tags.e {
                        // k1 suspended on an effect: rebuild the suspension so
                        // the remaining continuation runs k' and then k2.
                        let union_val =
                            *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                        let k_prime =
                            *(result.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8);

                        let new_node = self.alloc_con(self.tags.node, &[k_prime, k2]);
                        self.alloc_con(self.tags.e, &[union_val, new_node])
                    } else {
                        // Neither Val nor E: malformed result.
                        std::ptr::null_mut()
                    }
                } else {
                    // Constructor is not a continuation node.
                    std::ptr::null_mut()
                }
            }
            t if t == layout::TAG_CLOSURE => {
                // A bare closure acts as its own one-shot continuation.
                self.call_closure(k, arg)
            }
            _ => std::ptr::null_mut(),
        }
    }

    /// Invoke a compiled closure: load its code pointer and call it with
    /// `(vmctx, closure, arg)`. Tracing and heap validation are gated on the
    /// global trace level and are no-ops in normal operation.
    ///
    /// NOTE(review): `closure` is not null-checked before the offset reads
    /// below — callers (`apply_cont_heap`) must guarantee a valid closure
    /// object.
    unsafe fn call_closure(&mut self, closure: *mut u8, arg: *mut u8) -> *mut u8 {
        let code_ptr = *(closure.add(layout::CLOSURE_CODE_PTR_OFFSET) as *const usize);

        let trace = crate::debug::trace_level();
        if trace >= crate::debug::TraceLevel::Calls {
            let name = crate::debug::lookup_lambda(code_ptr)
                .unwrap_or_else(|| format!("0x{:x}", code_ptr));
            eprintln!(
                "[trace] call_closure {} closure={:?} arg={}",
                name,
                closure,
                crate::debug::heap_describe(arg),
            );
        }
        if trace >= crate::debug::TraceLevel::Heap {
            // At the highest trace level, validate both operands before the
            // call and dump the closure's environment; bail out with null
            // (treated as an error upstream) rather than executing on a
            // corrupt heap.
            if let Err(e) = crate::debug::heap_validate_deep(closure) {
                eprintln!("[trace] INVALID closure: {}", e);
                eprintln!("[trace] {}", crate::debug::heap_describe(closure));
                return std::ptr::null_mut();
            }
            if let Err(e) = crate::debug::heap_validate(arg) {
                eprintln!("[trace] INVALID arg: {}", e);
                return std::ptr::null_mut();
            }
            let num_captured = *(closure.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
            for i in 0..num_captured as usize {
                let cap = *(closure.add(layout::CLOSURE_CAPTURED_OFFSET + 8 * i) as *const *const u8);
                if cap.is_null() {
                    eprintln!("[trace] capture[{}] = NULL", i);
                } else {
                    eprintln!("[trace] capture[{}] = {}", i, crate::debug::heap_describe(cap));
                }
            }
        }

        // SAFETY: compiled closures use the fixed ABI
        // `extern "C" fn(vmctx, closure, arg) -> result` — upheld by codegen.
        let func: unsafe extern "C" fn(*mut VMContext, *mut u8, *mut u8) -> *mut u8 =
            std::mem::transmute(code_ptr);
        let result = func(&mut self.vmctx, closure, arg);

        if trace >= crate::debug::TraceLevel::Calls {
            let name = crate::debug::lookup_lambda(code_ptr)
                .unwrap_or_else(|| format!("0x{:x}", code_ptr));
            if result.is_null() {
                eprintln!("[trace] {} returned NULL", name);
            } else {
                eprintln!("[trace] {} returned {}", name, crate::debug::heap_describe(result));
            }
        }

        result
    }

    /// Bump-allocate a `TAG_CON` heap object carrying `con_tag` and the given
    /// pointer fields.
    ///
    /// Size is 24 bytes of header/metadata plus one 8-byte slot per field —
    /// this must agree with the offsets in `tidepool_heap::layout`.
    /// NOTE(review): `size as u16` and `fields.len() as u16` silently
    /// truncate for very large field counts — presumably unreachable in
    /// practice; confirm.
    unsafe fn alloc_con(&mut self, con_tag: u64, fields: &[*mut u8]) -> *mut u8 {
        let size = 24 + 8 * fields.len();
        let ptr = heap_bridge::bump_alloc_from_vmctx(&mut self.vmctx, size);
        layout::write_header(ptr, layout::TAG_CON, size as u16);
        *(ptr.add(layout::CON_TAG_OFFSET) as *mut u64) = con_tag;
        *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *mut u16) = fields.len() as u16;
        for (i, &fp) in fields.iter().enumerate() {
            *(ptr.add(layout::CON_FIELDS_OFFSET + 8 * i) as *mut *mut u8) = fp;
        }
        ptr
    }
}