//! tidepool_codegen/effect_machine.rs — compiled effect machine that drives
//! JIT-compiled freer-simple effect stacks (step/resume protocol over raw
//! heap-allocated Eff values).
1use crate::context::VMContext;
2use crate::heap_bridge;
3use crate::yield_type::{Yield, YieldError};
4use tidepool_heap::layout;
5
/// Constructor tags for the freer-simple Eff type.
///
/// These identify which DataCon a heap-allocated constructor represents,
/// allowing the effect machine to distinguish Val (pure result) from
/// E (effect request) and destructure Union wrappers and Leaf/Node continuations.
///
/// Tags are resolved by name from a `DataConTable` via [`ConTags::from_table`];
/// the numeric values are whatever that table assigned at codegen time.
#[derive(Debug, Clone, Copy)]
pub struct ConTags {
    /// Con_tag for the Val constructor (pure result).
    pub val: u64,
    /// Con_tag for the E constructor (effect request).
    pub e: u64,
    /// Con_tag for the Union constructor (effect type wrapper).
    pub union: u64,
    /// Con_tag for the Leaf constructor (leaf continuation).
    pub leaf: u64,
    /// Con_tag for the Node constructor (composed continuation).
    pub node: u64,
}
24
25impl ConTags {
26    /// Resolve freer-simple constructor tags from a DataConTable.
27    pub fn from_table(table: &tidepool_repr::DataConTable) -> Option<Self> {
28        Some(ConTags {
29            val: table.get_by_name("Val")?.0,
30            e: table.get_by_name("E")?.0,
31            union: table.get_by_name("Union")?.0,
32            leaf: table.get_by_name("Leaf")?.0,
33            node: table.get_by_name("Node")?.0,
34        })
35    }
36}
37
/// Compiled effect machine — drives JIT-compiled freer-simple effect stacks.
///
/// The step/resume protocol:
/// 1. step() calls the compiled function, parses the result:
///    - Con with Val con_tag → Yield::Done(value)
///    - Con with E con_tag → Yield::Request(tag, request, continuation)
/// 2. resume(continuation, response) applies the continuation tree to the response
///    and parses the resulting heap object.
pub struct CompiledEffectMachine {
    /// Entry point of the JIT-compiled program. Takes the VMContext and
    /// returns a heap-allocated Eff value (Val or E constructor).
    func_ptr: unsafe extern "C" fn(*mut VMContext) -> *mut u8,
    /// Execution context handed to every compiled call (holds nursery
    /// allocation pointers among other state — see `vmctx_mut`).
    vmctx: VMContext,
    /// Resolved constructor tags used to destructure Eff heap objects.
    tags: ConTags,
}

// SAFETY: All fields are raw pointers or function pointers, which are Send.
// NOTE(review): this asserts the pointed-to nursery/JIT memory is safe to use
// from another thread, which the type system cannot check — confirm that the
// owning runtime never runs two machines over the same nursery concurrently.
unsafe impl Send for CompiledEffectMachine {}
54
impl CompiledEffectMachine {
    /// Build a machine from a compiled entry point, its execution context,
    /// and the constructor tags resolved for this program.
    ///
    /// The caller is responsible for ensuring `func_ptr` was compiled against
    /// the same heap layout / VMContext ABI this crate assumes.
    pub fn new(
        func_ptr: unsafe extern "C" fn(*mut VMContext) -> *mut u8,
        vmctx: VMContext,
        tags: ConTags,
    ) -> Self {
        Self {
            func_ptr,
            vmctx,
            tags,
        }
    }

    /// Access the VMContext (e.g., to update nursery pointers after GC).
    pub fn vmctx_mut(&mut self) -> &mut VMContext {
        &mut self.vmctx
    }

    /// Execute the compiled function and parse the result.
    ///
    /// Returns `Yield::Done` for a Val result, `Yield::Request` for an E
    /// (effect) result, or `Yield::Error` for null/malformed heap objects.
    pub fn step(&mut self) -> Yield {
        // SAFETY (unchecked here): func_ptr is a JIT-compiled function that
        // expects exactly this VMContext; `new`'s caller guarantees the pairing.
        let result: *mut u8 = unsafe { (self.func_ptr)(&mut self.vmctx) };
        self.parse_result(result)
    }

    /// Resume after handling an effect by applying the continuation to the response.
    ///
    /// # Safety
    ///
    /// `continuation` and `response` must be valid heap pointers from the nursery.
    pub unsafe fn resume(&mut self, continuation: *mut u8, response: *mut u8) -> Yield {
        let result = self.apply_cont_heap(continuation, response);
        self.parse_result(result)
    }

    /// Parse a heap-allocated Eff result into a Yield.
    ///
    /// Layout assumptions (from `tidepool_heap::layout` constants): the object
    /// starts with a 1-byte heap tag; Con objects carry a u64 con_tag at
    /// CON_TAG_OFFSET, a u16 field count at CON_NUM_FIELDS_OFFSET, and 8-byte
    /// pointer fields starting at CON_FIELDS_OFFSET.
    fn parse_result(&self, result: *mut u8) -> Yield {
        if result.is_null() {
            // Check for runtime error set by JIT code (overflowError/divZeroError)
            if let Some(err) = crate::host_fns::take_runtime_error() {
                return Yield::Error(match err {
                    crate::host_fns::RuntimeError::DivisionByZero => YieldError::DivisionByZero,
                    crate::host_fns::RuntimeError::Overflow => YieldError::Overflow,
                });
            }
            // Null with no pending runtime error: the compiled code produced
            // nothing we can interpret.
            return Yield::Error(YieldError::NullPointer);
        }

        // Top-level Eff results must be constructor objects (Val or E).
        let tag = unsafe { *result };
        if tag != layout::TAG_CON {
            return Yield::Error(YieldError::UnexpectedTag(tag));
        }

        let con_tag = unsafe { *(result.add(layout::CON_TAG_OFFSET) as *const u64) };

        if con_tag == self.tags.val {
            // Val(value) — extract value from fields[0]
            let num_fields = unsafe { *(result.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16) };
            if num_fields < 1 {
                return Yield::Error(YieldError::BadValFields(num_fields));
            }
            let value = unsafe { *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8) };
            Yield::Done(value)
        } else if con_tag == self.tags.e {
            // E(union, continuation) — extract Union and k
            let num_fields = unsafe { *(result.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16) };
            if num_fields != 2 {
                return Yield::Error(YieldError::BadEFields(num_fields));
            }
            // fields[0] = Union wrapper, fields[1] = continuation tree.
            let union_ptr = unsafe { *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8) };
            let continuation =
                unsafe { *(result.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8) };

            if union_ptr.is_null() {
                return Yield::Error(YieldError::NullPointer);
            }
            // NOTE(review): `continuation` is not null-checked here; a null k is
            // caught later by apply_cont_heap, but it reaches the caller inside
            // Yield::Request first — confirm that is intended.

            // NOTE(review): union_ptr's heap tag is not verified to be TAG_CON
            // before its field count is read — assumes the JIT always builds a
            // well-formed Union here; confirm.
            let union_num_fields =
                unsafe { *(union_ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16) };
            if union_num_fields != 2 {
                return Yield::Error(YieldError::BadUnionFields(union_num_fields));
            }

            // Union(tag, request): fields[0] is a Lit holding the effect tag,
            // fields[1] is the request payload.
            let tag_ptr =
                unsafe { *(union_ptr.add(layout::CON_FIELDS_OFFSET) as *const *mut u8) };
            if tag_ptr.is_null() {
                return Yield::Error(YieldError::NullPointer);
            }
            // Read the actual tag value from the Lit HeapObject (offset 16 = LIT_VALUE_OFFSET)
            let effect_tag = unsafe { *(tag_ptr.add(layout::LIT_VALUE_OFFSET) as *const u64) };
            let request =
                unsafe { *(union_ptr.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8) };

            Yield::Request {
                tag: effect_tag,
                request,
                continuation,
            }
        } else {
            Yield::Error(YieldError::UnexpectedConTag(con_tag))
        }
    }

    /// Apply a Leaf/Node continuation tree to a value, yielding a new Eff result.
    ///
    /// Mirrors the interpreter's `apply_cont` on raw heap pointers:
    /// - Leaf(f): call f(arg) via call_closure
    /// - Node(k1, k2): apply k1(arg), if Val(y) → k2(y), if E(union, k') → E(union, Node(k', k2))
    /// - Closure: direct call_closure (degenerate continuation fallback)
    ///
    /// Returns null on any malformed input; callers translate null into
    /// Yield::Error via parse_result.
    ///
    /// # Safety
    ///
    /// `k` and `arg` must be valid heap pointers.
    unsafe fn apply_cont_heap(&mut self, k: *mut u8, arg: *mut u8) -> *mut u8 {
        if k.is_null() {
            return std::ptr::null_mut();
        }

        let tag = *k;
        match tag {
            t if t == layout::TAG_CON => {
                let con_tag = *(k.add(layout::CON_TAG_OFFSET) as *const u64);

                if con_tag == self.tags.leaf {
                    // Leaf(f) — extract closure f at field[0], call f(arg)
                    let f = *(k.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                    self.call_closure(f, arg)
                } else if con_tag == self.tags.node {
                    // Node(k1, k2) — apply k1 to arg, then compose with k2
                    let k1 = *(k.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                    let k2 = *(k.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8);

                    // NOTE(review): recursion depth here tracks the left spine of
                    // the Node tree — presumably shallow in practice; confirm.
                    let result = self.apply_cont_heap(k1, arg);
                    if result.is_null() {
                        return std::ptr::null_mut();
                    }

                    // Check if result is Val or E
                    let result_tag = *result;
                    if result_tag != layout::TAG_CON {
                        return std::ptr::null_mut();
                    }

                    let result_con_tag =
                        *(result.add(layout::CON_TAG_OFFSET) as *const u64);

                    if result_con_tag == self.tags.val {
                        // Val(y) — extract y, apply k2(y)
                        let y = *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                        self.apply_cont_heap(k2, y)
                    } else if result_con_tag == self.tags.e {
                        // E(union, k') — compose: E(union, Node(k', k2))
                        let union_val =
                            *(result.add(layout::CON_FIELDS_OFFSET) as *const *mut u8);
                        let k_prime =
                            *(result.add(layout::CON_FIELDS_OFFSET + 8) as *const *mut u8);

                        // Allocate Node(k', k2)
                        // NOTE(review): if bump_alloc can trigger a GC that moves
                        // objects, union_val/k2 could be stale by the second
                        // allocation — confirm allocation here cannot collect.
                        let new_node = self.alloc_con(self.tags.node, &[k_prime, k2]);
                        // Allocate E(union, new_node)
                        self.alloc_con(self.tags.e, &[union_val, new_node])
                    } else {
                        std::ptr::null_mut()
                    }
                } else {
                    // Unknown Con tag in continuation position — error
                    std::ptr::null_mut()
                }
            }
            t if t == layout::TAG_CLOSURE => {
                // Raw closure (degenerate continuation fallback)
                self.call_closure(k, arg)
            }
            _ => std::ptr::null_mut(),
        }
    }

    /// Call a compiled closure: read code_ptr from closure[8], invoke it.
    ///
    /// The compiled calling convention is `(vmctx, closure, arg) -> result`,
    /// where `closure` is passed back so the callee can read its captures.
    /// Tracing (Calls/Heap levels) is purely diagnostic, except that at Heap
    /// level a failed validation short-circuits to null instead of calling.
    ///
    /// # Safety
    ///
    /// `closure` must point to a valid Closure HeapObject.
    unsafe fn call_closure(&mut self, closure: *mut u8, arg: *mut u8) -> *mut u8 {
        let code_ptr = *(closure.add(layout::CLOSURE_CODE_PTR_OFFSET) as *const usize);

        let trace = crate::debug::trace_level();
        if trace >= crate::debug::TraceLevel::Calls {
            let name = crate::debug::lookup_lambda(code_ptr)
                .unwrap_or_else(|| format!("0x{:x}", code_ptr));
            eprintln!(
                "[trace] call_closure {} closure={:?} arg={}",
                name,
                closure,
                crate::debug::heap_describe(arg),
            );
        }
        if trace >= crate::debug::TraceLevel::Heap {
            // At Heap trace level, validate both pointers before the call and
            // refuse (return null) rather than execute over a corrupt object.
            if let Err(e) = crate::debug::heap_validate_deep(closure) {
                eprintln!("[trace] INVALID closure: {}", e);
                eprintln!("[trace]   {}", crate::debug::heap_describe(closure));
                return std::ptr::null_mut();
            }
            if let Err(e) = crate::debug::heap_validate(arg) {
                eprintln!("[trace] INVALID arg: {}", e);
                return std::ptr::null_mut();
            }
            // Dump captures
            let num_captured = *(closure.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
            for i in 0..num_captured as usize {
                let cap = *(closure.add(layout::CLOSURE_CAPTURED_OFFSET + 8 * i) as *const *const u8);
                if cap.is_null() {
                    eprintln!("[trace]   capture[{}] = NULL", i);
                } else {
                    eprintln!("[trace]   capture[{}] = {}", i, crate::debug::heap_describe(cap));
                }
            }
        }

        // SAFETY (unchecked): code_ptr must be the address of a JIT-compiled
        // function with this exact extern "C" signature; guaranteed by the
        // codegen that built the Closure object.
        let func: unsafe extern "C" fn(*mut VMContext, *mut u8, *mut u8) -> *mut u8 =
            std::mem::transmute(code_ptr);
        let result = func(&mut self.vmctx, closure, arg);

        if trace >= crate::debug::TraceLevel::Calls {
            let name = crate::debug::lookup_lambda(code_ptr)
                .unwrap_or_else(|| format!("0x{:x}", code_ptr));
            if result.is_null() {
                eprintln!("[trace] {} returned NULL", name);
            } else {
                eprintln!("[trace] {} returned {}", name, crate::debug::heap_describe(result));
            }
        }

        result
    }

    /// Allocate a Con HeapObject on the nursery with the given tag and fields.
    ///
    /// Size is 24 bytes of header/metadata plus one 8-byte pointer slot per
    /// field (24 presumably equals CON_FIELDS_OFFSET — confirm against layout).
    unsafe fn alloc_con(&mut self, con_tag: u64, fields: &[*mut u8]) -> *mut u8 {
        let size = 24 + 8 * fields.len();
        // NOTE(review): `size as u16` truncates silently for very wide Cons
        // (>~8k fields); never hit by the 2-field callers above, but worth a
        // debug_assert if this is ever used more generally.
        let ptr = heap_bridge::bump_alloc_from_vmctx(&mut self.vmctx, size);
        layout::write_header(ptr, layout::TAG_CON, size as u16);
        *(ptr.add(layout::CON_TAG_OFFSET) as *mut u64) = con_tag;
        *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *mut u16) = fields.len() as u16;
        for (i, &fp) in fields.iter().enumerate() {
            *(ptr.add(layout::CON_FIELDS_OFFSET + 8 * i) as *mut *mut u8) = fp;
        }
        ptr
    }
}
301}