Skip to main content

lex_bytecode/
compiler.rs

1//! M4 compiler: canonical AST → bytecode.
2
3use crate::op::*;
4use crate::program::*;
5use indexmap::IndexMap;
6use lex_ast as a;
7
/// Interning pool for the program's constants and record shapes.
/// `pool` is the flat constant table the finished `Program` ships;
/// the side maps deduplicate by value so each distinct constant is
/// stored once and every use site shares a single index.
#[derive(Default)]
struct ConstPool {
    /// The constant table itself, in insertion order.
    pool: Vec<Const>,
    /// Dedup maps: value → index into `pool`, one per constant kind.
    fields: IndexMap<String, u32>,
    variants: IndexMap<String, u32>,
    node_ids: IndexMap<String, u32>,
    ints: IndexMap<i64, u32>,
    /// Keyed by `b as u8` (see `bool()`).
    bools: IndexMap<u8, u32>,
    strs: IndexMap<String, u32>,
    /// Interned record field-name shapes (#461). Deduplicated by content
    /// so a record literal with the same field-name layout reuses the
    /// same `shape_idx` across the whole program — keeps the side-table
    /// small even when the same struct is constructed in many places.
    record_shapes: Vec<Vec<u32>>,
    /// Shape contents → index into `record_shapes`.
    record_shape_dedup: IndexMap<Vec<u32>, u32>,
}
24
25impl ConstPool {
26    fn field(&mut self, name: &str) -> u32 {
27        if let Some(i) = self.fields.get(name) { return *i; }
28        let i = self.pool.len() as u32;
29        self.pool.push(Const::FieldName(name.into()));
30        self.fields.insert(name.into(), i);
31        i
32    }
33    fn variant(&mut self, name: &str) -> u32 {
34        if let Some(i) = self.variants.get(name) { return *i; }
35        let i = self.pool.len() as u32;
36        self.pool.push(Const::VariantName(name.into()));
37        self.variants.insert(name.into(), i);
38        i
39    }
40    fn node_id(&mut self, name: &str) -> u32 {
41        if let Some(i) = self.node_ids.get(name) { return *i; }
42        let i = self.pool.len() as u32;
43        self.pool.push(Const::NodeId(name.into()));
44        self.node_ids.insert(name.into(), i);
45        i
46    }
47    fn int(&mut self, n: i64) -> u32 {
48        if let Some(i) = self.ints.get(&n) { return *i; }
49        let i = self.pool.len() as u32;
50        self.pool.push(Const::Int(n));
51        self.ints.insert(n, i);
52        i
53    }
54    fn bool(&mut self, b: bool) -> u32 {
55        let key = b as u8;
56        if let Some(i) = self.bools.get(&key) { return *i; }
57        let i = self.pool.len() as u32;
58        self.pool.push(Const::Bool(b));
59        self.bools.insert(key, i);
60        i
61    }
62    fn str(&mut self, s: &str) -> u32 {
63        if let Some(i) = self.strs.get(s) { return *i; }
64        let i = self.pool.len() as u32;
65        self.pool.push(Const::Str(s.into()));
66        self.strs.insert(s.into(), i);
67        i
68    }
69    fn float(&mut self, f: f64) -> u32 {
70        // Floats: not deduped (NaN issues).
71        let i = self.pool.len() as u32;
72        self.pool.push(Const::Float(f));
73        i
74    }
75    fn unit(&mut self) -> u32 {
76        let i = self.pool.len() as u32;
77        self.pool.push(Const::Unit);
78        i
79    }
80
81    /// Intern a record field-name shape (#461). Returns the index into
82    /// `record_shapes`; identical shapes (same field-name-index vec)
83    /// always return the same index.
84    fn record_shape(&mut self, idxs: Vec<u32>) -> u32 {
85        if let Some(i) = self.record_shape_dedup.get(&idxs) {
86            return *i;
87        }
88        let i = self.record_shapes.len() as u32;
89        self.record_shape_dedup.insert(idxs.clone(), i);
90        self.record_shapes.push(idxs);
91        i
92    }
93}
94
95pub fn compile_program(stages: &[a::Stage]) -> Program {
96    let mut p = Program {
97        constants: Vec::new(),
98        functions: Vec::new(),
99        function_names: IndexMap::new(),
100        module_aliases: IndexMap::new(),
101        entry: None,
102        record_shapes: Vec::new(),
103    };
104
105    // Collect imports as alias → module-name. The module name is the part
106    // after `std.` (so `import "std.io" as io` ⇒ alias `io` → module `io`).
107    for s in stages {
108        if let a::Stage::Import(i) = s {
109            let module = i.reference.strip_prefix("std.").unwrap_or(&i.reference).to_string();
110            p.module_aliases.insert(i.alias.clone(), module);
111        }
112    }
113
114    for s in stages {
115        if let a::Stage::FnDecl(fd) = s {
116            let idx = p.functions.len() as u32;
117            p.function_names.insert(fd.name.clone(), idx);
118            p.functions.push(Function {
119                name: fd.name.clone(),
120                arity: fd.params.len() as u16,
121                locals_count: 0,
122                code: Vec::new(),
123                effects: fd.effects.iter().map(|e| DeclaredEffect {
124                    kind: e.name.clone(),
125                    arg: e.arg.as_ref().map(|a| match a {
126                        a::EffectArg::Str { value } => EffectArg::Str(value.clone()),
127                        a::EffectArg::Int { value } => EffectArg::Int(*value),
128                        a::EffectArg::Ident { value } => EffectArg::Ident(value.clone()),
129                    }),
130                }).collect(),
131                // Filled in at the end of the compile pass, once `code`
132                // and `locals_count` are final. See #222.
133                body_hash: crate::program::ZERO_BODY_HASH,
134                // Per-param refinement predicates for runtime check
135                // (#209 slice 3). Lifted directly from each param's
136                // `TypeExpr::Refined` if present; `None` otherwise.
137                refinements: fd.params.iter().map(|p| match &p.ty {
138                    a::TypeExpr::Refined { binding, predicate, .. } =>
139                        Some(crate::program::Refinement {
140                            binding: binding.clone(),
141                            predicate: (**predicate).clone(),
142                        }),
143                    _ => None,
144                }).collect(),
145            });
146        }
147    }
148
149    let mut pool = ConstPool::default();
150    let function_names = p.function_names.clone();
151    let module_aliases = p.module_aliases.clone();
152    let mut pending_lambdas: Vec<PendingLambda> = Vec::new();
153
154    for s in stages {
155        if let a::Stage::FnDecl(_) = s {
156            // Build a NodeId map for *this* stage so the compiler can stamp
157            // each Call/EffectCall opcode with the originating AST node.
158            let id_map = lex_ast::expr_ids(s);
159            let fd = match s { a::Stage::FnDecl(fd) => fd, _ => unreachable!() };
160            let mut fc = FnCompiler {
161                code: Vec::new(),
162                locals: IndexMap::new(),
163                next_local: 0,
164                peak_local: 0,
165                pool: &mut pool,
166                function_names: &function_names,
167                module_aliases: &module_aliases,
168                id_map: &id_map,
169                pending_lambdas: &mut pending_lambdas,
170                next_fn_id: &mut p.functions,
171            };
172            for param in &fd.params {
173                let i = fc.next_local;
174                fc.locals.insert(param.name.clone(), i);
175                fc.next_local += 1;
176                fc.peak_local = fc.next_local;
177            }
178            fc.compile_expr(&fd.body, true);
179            fc.code.push(Op::Return);
180            let code = std::mem::take(&mut fc.code);
181            let peak = fc.peak_local;
182            drop(fc);
183            let idx = function_names[&fd.name];
184            p.functions[idx as usize].code = code;
185            p.functions[idx as usize].locals_count = peak;
186        }
187    }
188
189    // Compile pending lambdas in FIFO order. Each lambda may emit further
190    // lambdas; loop until the queue drains.
191    while let Some(pl) = pending_lambdas.pop() {
192        let id_map = std::collections::HashMap::new();
193        let mut fc = FnCompiler {
194            code: Vec::new(),
195            locals: IndexMap::new(),
196            next_local: 0,
197            peak_local: 0,
198            pool: &mut pool,
199            function_names: &function_names,
200            module_aliases: &module_aliases,
201            id_map: &id_map,
202            pending_lambdas: &mut pending_lambdas,
203            next_fn_id: &mut p.functions,
204        };
205        for name in &pl.capture_names {
206            let i = fc.next_local;
207            fc.locals.insert(name.clone(), i);
208            fc.next_local += 1;
209            fc.peak_local = fc.next_local;
210        }
211        for p in &pl.params {
212            let i = fc.next_local;
213            fc.locals.insert(p.name.clone(), i);
214            fc.next_local += 1;
215            fc.peak_local = fc.next_local;
216        }
217        fc.compile_expr(&pl.body, true);
218        fc.code.push(Op::Return);
219        let code = std::mem::take(&mut fc.code);
220        let peak = fc.peak_local;
221        drop(fc);
222        p.functions[pl.fn_id as usize].code = code;
223        p.functions[pl.fn_id as usize].locals_count = peak;
224    }
225
226    // Peephole pass (#461 superinstructions). Rewrites fusable opcode
227    // patterns into single dispatch steps. Runs before `body_hash`
228    // computation, but `compute_body_hash` decomposes each fused op
229    // back to its primitive form on hash — so closure identity (#222)
230    // is invariant under this pass and the order doesn't matter.
231    for f in p.functions.iter_mut() {
232        apply_peephole(&mut f.code, &pool.pool);
233    }
234
235    // Final pass: stamp every function with its content hash now that
236    // every body is finalized (#222). Trampolines installed via
237    // `install_trampoline` already have it; recomputing is cheap and
238    // makes the invariant easier to read at this top level.
239    for f in p.functions.iter_mut() {
240        if f.body_hash == crate::program::ZERO_BODY_HASH {
241            f.body_hash = crate::program::compute_body_hash(
242                f.arity, f.locals_count, &f.code, &pool.record_shapes);
243        }
244    }
245
246    p.constants = pool.pool;
247    p.record_shapes = pool.record_shapes;
248    p
249}
250
251/// Peephole pass: rewrite fusable opcode patterns into superinstructions
252/// (#461). Each fused op claims its own slot in the code stream; the
253/// trailing primitive ops it absorbs stay in place as inert
254/// "tombstones" — the dispatch loop overrides its default `pc += 1`
255/// to step past them. Leaving the tombstones in place keeps
256/// `code.len()` invariant and means we don't have to renumber jump
257/// offsets.
258///
259/// Pattern (slice 1): `LoadLocal(i), PushConst(c), IntAdd` where
260/// `constants[c]` is a `Const::Int`. Fused to
261/// `LoadLocalAddIntConst { local_idx: i, imm_const_idx: c }`.
262/// Safety: the second and third slots must not be reachable from
263/// any Jump / JumpIf / JumpIfNot — otherwise a jump would land on a
264/// tombstone instead of the live op the source intended. The
265/// pre-pass below collects every jump target in the function and
266/// skips fusion sites whose tombstones overlap one.
267fn apply_peephole(code: &mut [Op], constants: &[Const]) {
268    if code.len() < 3 { return; }
269
270    // Collect jump targets so we don't fuse across them.
271    let mut jump_targets = std::collections::HashSet::new();
272    for (pc, op) in code.iter().enumerate() {
273        let off = match op {
274            Op::Jump(off) | Op::JumpIf(off) | Op::JumpIfNot(off) => Some(*off),
275            _ => None,
276        };
277        if let Some(off) = off {
278            let target = (pc as i32 + 1 + off) as usize;
279            jump_targets.insert(target);
280        }
281    }
282
283    let n = code.len();
284    let mut k = 0;
285    while k + 2 < n {
286        if let (Op::LoadLocal(local_idx), Op::PushConst(imm_const_idx), Op::IntAdd)
287            = (code[k], code[k + 1], code[k + 2])
288        {
289            let imm_is_int = matches!(
290                constants.get(imm_const_idx as usize),
291                Some(Const::Int(_))
292            );
293            // Tombstones at k+1 and k+2 must not be jump targets;
294            // k itself can be a target (it stays a live op — the
295            // fused form executes the same semantics in one step).
296            let safe = imm_is_int
297                && !jump_targets.contains(&(k + 1))
298                && !jump_targets.contains(&(k + 2));
299            if safe {
300                code[k] = Op::LoadLocalAddIntConst { local_idx, imm_const_idx };
301                k += 3;
302                continue;
303            }
304        }
305        k += 1;
306    }
307}
308
/// A lambda discovered while compiling a function body. Its `fn_id`
/// (a placeholder `Function` slot) is allocated at discovery time; the
/// body is compiled in a later pass once the enclosing pass finishes.
#[derive(Debug, Clone)]
struct PendingLambda {
    /// Index of the placeholder slot in the program's function table.
    fn_id: u32,
    /// Names of captured outer-scope locals, in order.
    capture_names: Vec<String>,
    /// Declared parameters; bound after the captures when compiled.
    params: Vec<a::Param>,
    /// The lambda body, cloned out of the enclosing AST.
    body: a::CExpr,
}
317
/// Per-function compilation state: one `FnCompiler` is built for each
/// fn body (and each pending lambda) and emits into `code`.
struct FnCompiler<'a> {
    /// Opcodes emitted so far for the current function body.
    code: Vec<Op>,
    /// Name → local slot for everything currently in scope.
    locals: IndexMap<String, u16>,
    /// Next free local slot.
    next_local: u16,
    /// Peak local usage seen during compilation (for VM frame sizing).
    peak_local: u16,
    /// Shared constant pool for the whole program.
    pool: &'a mut ConstPool,
    /// Top-level fn name → fn_id (snapshot taken before body compilation).
    function_names: &'a IndexMap<String, u32>,
    /// Import alias → std module name.
    module_aliases: &'a IndexMap<String, String>,
    /// CExpr address → NodeId, populated per stage via `lex_ast::expr_ids`.
    id_map: &'a std::collections::HashMap<*const a::CExpr, lex_ast::NodeId>,
    /// Queue of lambdas discovered during compilation; each gets a fresh
    /// fn_id and is compiled in a later pass.
    pending_lambdas: &'a mut Vec<PendingLambda>,
    /// Mutable view of the function table — used to allocate fn_ids for
    /// freshly-discovered lambdas.
    next_fn_id: &'a mut Vec<Function>,
}
336
337impl<'a> FnCompiler<'a> {
338    fn alloc_local(&mut self, name: &str) -> u16 {
339        let i = self.next_local;
340        self.locals.insert(name.into(), i);
341        self.next_local += 1;
342        if self.next_local > self.peak_local { self.peak_local = self.next_local; }
343        i
344    }
    /// Append one opcode to the function body being built.
    fn emit(&mut self, op: Op) { self.code.push(op); }
346
347    fn compile_expr(&mut self, e: &a::CExpr, tail: bool) {
348        match e {
349            a::CExpr::Literal { value } => self.compile_lit(value),
350            a::CExpr::Var { name } => {
351                if let Some(slot) = self.locals.get(name) {
352                    self.emit(Op::LoadLocal(*slot));
353                } else if let Some(&fn_id) = self.function_names.get(name) {
354                    // Function name used as a *value* (e.g. as a record-field
355                    // initializer or fold-callback arg) — materialize it as a
356                    // closure with no captures. The runtime already accepts
357                    // `Value::Closure { fn_id, captures: vec![] }` and
358                    // `CallClosure` dispatches it. (#169)
359                    self.emit(Op::MakeClosure { fn_id, capture_count: 0 });
360                } else {
361                    // Should be caught at type-check time; the type checker
362                    // walks every Var. If we land here it's a compiler bug,
363                    // not a user typo.
364                    panic!("unknown var in compiler: {name}");
365                }
366            }
367            a::CExpr::Let { name, ty: _, value, body } => {
368                self.compile_expr(value, false);
369                let slot = self.alloc_local(name);
370                self.emit(Op::StoreLocal(slot));
371                self.compile_expr(body, tail);
372            }
373            a::CExpr::Block { statements, result } => {
374                for s in statements {
375                    self.compile_expr(s, false);
376                    self.emit(Op::Pop);
377                }
378                self.compile_expr(result, tail);
379            }
380            a::CExpr::Call { callee, args } => self.compile_call(e, callee, args, tail),
381            a::CExpr::Constructor { name, args } => {
382                for a in args { self.compile_expr(a, false); }
383                let name_idx = self.pool.variant(name);
384                self.emit(Op::MakeVariant { name_idx, arity: args.len() as u16 });
385            }
386            a::CExpr::Match { scrutinee, arms } => self.compile_match(scrutinee, arms, tail),
387            a::CExpr::RecordLit { fields } => {
388                let mut idxs = Vec::with_capacity(fields.len());
389                for f in fields {
390                    self.compile_expr(&f.value, false);
391                    idxs.push(self.pool.field(&f.name));
392                }
393                let field_count = idxs.len() as u16;
394                let shape_idx = self.pool.record_shape(idxs);
395                self.emit(Op::MakeRecord { shape_idx, field_count });
396            }
397            a::CExpr::TupleLit { items } => {
398                for it in items { self.compile_expr(it, false); }
399                self.emit(Op::MakeTuple(items.len() as u16));
400            }
401            a::CExpr::ListLit { items } => {
402                for it in items { self.compile_expr(it, false); }
403                self.emit(Op::MakeList(items.len() as u32));
404            }
405            a::CExpr::FieldAccess { value, field } => {
406                self.compile_expr(value, false);
407                let idx = self.pool.field(field);
408                self.emit(Op::GetField(idx));
409            }
410            a::CExpr::BinOp { op, lhs, rhs } => self.compile_binop(op, lhs, rhs),
411            a::CExpr::UnaryOp { op, expr } => {
412                self.compile_expr(expr, false);
413                match op.as_str() {
414                    "-" => self.emit(Op::NumNeg),
415                    "not" => self.emit(Op::BoolNot),
416                    other => panic!("unknown unary: {other}"),
417                }
418            }
419            a::CExpr::Lambda { params, body, .. } => self.compile_lambda(params, body),
420            a::CExpr::Return { value } => {
421                self.compile_expr(value, true);
422                self.emit(Op::Return);
423            }
424        }
425    }
426
427    fn compile_lit(&mut self, l: &a::CLit) {
428        let i = match l {
429            a::CLit::Int { value } => self.pool.int(*value),
430            a::CLit::Bool { value } => self.pool.bool(*value),
431            a::CLit::Float { value } => {
432                let f: f64 = value.parse().unwrap_or(0.0);
433                self.pool.float(f)
434            }
435            a::CLit::Str { value } => self.pool.str(value),
436            a::CLit::Bytes { value: _ } => {
437                // Stub: M4 doesn't use bytes literals in §3.13 examples.
438                let i = self.pool.pool.len() as u32;
439                self.pool.pool.push(Const::Bytes(Vec::new()));
440                i
441            }
442            a::CLit::Unit => self.pool.unit(),
443        };
444        self.emit(Op::PushConst(i));
445    }
446
    /// Compile a call, choosing the opcode by the callee's shape:
    /// module op ⇒ `EffectCall` (or inlined higher-order bytecode),
    /// known top-level fn ⇒ `Call`/`TailCall`, anything else ⇒ push the
    /// callee value + args, then `CallClosure`. The call site's NodeId
    /// is interned up front so every emitted call opcode carries it.
    fn compile_call(&mut self, call_expr: &a::CExpr, callee: &a::CExpr, args: &[a::CExpr], tail: bool) {
        // Look up the AST node for this call by expression address;
        // fall back to the "n_?" placeholder when the stage's id map
        // has no entry (e.g. inside lambda bodies).
        let node_id = self
            .id_map
            .get(&(call_expr as *const a::CExpr))
            .map(|n| n.as_str().to_string())
            .unwrap_or_else(|| "n_?".into());
        let node_id_idx = self.pool.node_id(&node_id);

        // Module function call: `alias.op(args)` where `alias` is an imported
        // module ⇒ EffectCall, except for higher-order pure ops where we
        // emit inline bytecode using CallClosure (the closure-arg can't be
        // serialized through the effect handler).
        if let a::CExpr::FieldAccess { value, field } = callee {
            if let a::CExpr::Var { name } = value.as_ref() {
                if let Some(module) = self.module_aliases.get(name) {
                    if self.try_emit_higher_order(module, field, args, node_id_idx) {
                        // `tail` is intentionally unused on this path.
                        let _ = tail;
                        return;
                    }
                    for a in args { self.compile_expr(a, false); }
                    let kind_idx = self.pool.str(module);
                    let op_idx = self.pool.str(field);
                    self.emit(Op::EffectCall {
                        kind_idx,
                        op_idx,
                        arity: args.len() as u16,
                        node_id_idx,
                    });
                    // `tail` is intentionally unused on this path.
                    let _ = tail;
                    return;
                }
            }
        }
        // NOTE(review): top-level fns are checked *before* locals here,
        // so a local that shadows a global fn is called as the global —
        // the opposite of Var resolution (locals first) and of the #339
        // shadowing rule in `compile_lambda`. Confirm this is intended.
        match callee {
            a::CExpr::Var { name } if self.function_names.contains_key(name) => {
                for a in args { self.compile_expr(a, false); }
                let fn_id = self.function_names[name];
                if tail {
                    self.emit(Op::TailCall { fn_id, arity: args.len() as u16, node_id_idx });
                } else {
                    self.emit(Op::Call { fn_id, arity: args.len() as u16, node_id_idx });
                }
            }
            a::CExpr::Var { name } if self.locals.contains_key(name) => {
                // First-class function value bound to a local. Push the
                // closure, then args, then CallClosure.
                let slot = self.locals[name];
                self.emit(Op::LoadLocal(slot));
                for a in args { self.compile_expr(a, false); }
                self.emit(Op::CallClosure { arity: args.len() as u16, node_id_idx });
            }
            // Lambda directly applied — push closure + args + CallClosure.
            other => {
                self.compile_expr(other, false);
                for a in args { self.compile_expr(a, false); }
                self.emit(Op::CallClosure { arity: args.len() as u16, node_id_idx });
            }
        }
    }
506
507    fn compile_binop(&mut self, op: &str, lhs: &a::CExpr, rhs: &a::CExpr) {
508        self.compile_expr(lhs, false);
509        self.compile_expr(rhs, false);
510        match op {
511            "+" => self.emit(Op::NumAdd),
512            "-" => self.emit(Op::NumSub),
513            "*" => self.emit(Op::NumMul),
514            "/" => self.emit(Op::NumDiv),
515            "%" => self.emit(Op::NumMod),
516            "==" => self.emit(Op::NumEq),
517            "!=" => { self.emit(Op::NumEq); self.emit(Op::BoolNot); }
518            "<" => self.emit(Op::NumLt),
519            "<=" => self.emit(Op::NumLe),
520            ">" => { self.emit_swap_top2(); self.emit(Op::NumLt); }
521            ">=" => { self.emit_swap_top2(); self.emit(Op::NumLe); }
522            "and" => self.emit(Op::BoolAnd),
523            "or" => self.emit(Op::BoolOr),
524            other => panic!("unknown binop: {other:?}"),
525        }
526    }
527
528    fn emit_swap_top2(&mut self) {
529        let a = self.alloc_local("__swap_a");
530        let b = self.alloc_local("__swap_b");
531        self.emit(Op::StoreLocal(b));
532        self.emit(Op::StoreLocal(a));
533        self.emit(Op::LoadLocal(b));
534        self.emit(Op::LoadLocal(a));
535    }
536
    /// Compile a `match`. The scrutinee is stored once in a scratch
    /// local; each arm reloads it, runs its pattern test, and on success
    /// executes its body then jumps to the shared end label. Pattern
    /// failures fall through to the next arm's test; falling past the
    /// last arm hits `Panic("non-exhaustive match")`.
    fn compile_match(&mut self, scrutinee: &a::CExpr, arms: &[a::Arm], tail: bool) {
        self.compile_expr(scrutinee, false);
        let scrut_slot = self.alloc_local("__scrut");
        self.emit(Op::StoreLocal(scrut_slot));

        let mut end_jumps: Vec<usize> = Vec::new();
        for arm in arms {
            // Snapshot the scope: pattern bindings are arm-local and are
            // rolled back once the arm is compiled.
            let arm_start_locals = self.next_local;
            let arm_start_locals_map = self.locals.clone();

            self.emit(Op::LoadLocal(scrut_slot));
            // `bindings` records (name, slot) pairs from PVar patterns;
            // only the allocation/StoreLocal side effects matter here —
            // the list itself is not read again.
            let mut bindings: Vec<(String, u16)> = Vec::new();
            let fail_jumps: Vec<usize> = self.compile_pattern_test(&arm.pattern, &mut bindings);

            self.compile_expr(&arm.body, tail);
            // Placeholder jump to the end of the match; patched below.
            let j_end = self.code.len();
            self.emit(Op::Jump(0));
            end_jumps.push(j_end);

            // Patch this arm's failure jumps to land here — the next
            // arm's test, or the non-exhaustive panic.
            let fail_target = self.code.len() as i32;
            for j in fail_jumps {
                // #337: PConstructor patterns now register an
                // unconditional `Op::Jump` for the failure path
                // (alongside the existing `Op::JumpIfNot` from
                // PLiteral / nested constructor tests). Patch
                // either shape.
                match &mut self.code[j] {
                    Op::JumpIfNot(off) => *off = fail_target - (j as i32 + 1),
                    Op::Jump(off)      => *off = fail_target - (j as i32 + 1),
                    _ => {}
                }
            }
            self.next_local = arm_start_locals;
            self.locals = arm_start_locals_map;
        }
        let panic_msg_idx = self.pool.str("non-exhaustive match");
        self.emit(Op::Panic(panic_msg_idx));

        // Patch every arm's end-jump to land just past the panic.
        let end_target = self.code.len() as i32;
        for j in end_jumps {
            if let Op::Jump(off) = &mut self.code[j] {
                *off = end_target - (j as i32 + 1);
            }
        }
    }
582
    /// Test pattern `p` against the value on top of the stack, consuming
    /// it. `PVar` sub-patterns allocate a local, emit the store, and
    /// record `(name, slot)` in `bindings`. Returns the code indices of
    /// every placeholder failure jump (offset 0) for the caller
    /// (`compile_match`) to patch; both `JumpIfNot` and unconditional
    /// `Jump` shapes occur (#337).
    fn compile_pattern_test(&mut self, p: &a::Pattern, bindings: &mut Vec<(String, u16)>) -> Vec<usize> {
        let mut fails = Vec::new();
        match p {
            // Wildcard always matches; just drop the value.
            a::Pattern::PWild => { self.emit(Op::Pop); }
            // Variable always matches; bind the value to a fresh local.
            a::Pattern::PVar { name } => {
                let slot = self.alloc_local(name);
                self.emit(Op::StoreLocal(slot));
                bindings.push((name.clone(), slot));
            }
            // Literal: push it, compare with the type-appropriate
            // equality op, branch to failure when unequal.
            a::Pattern::PLiteral { value } => {
                self.compile_lit(value);
                match value {
                    a::CLit::Str { .. } => self.emit(Op::StrEq),
                    a::CLit::Bytes { .. } => self.emit(Op::BytesEq),
                    _ => self.emit(Op::NumEq),
                }
                let j = self.code.len();
                self.emit(Op::JumpIfNot(0));
                fails.push(j);
            }
            a::Pattern::PConstructor { name, args } => {
                let name_idx = self.pool.variant(name);
                // #337: the failure path must drop the duplicated
                // scrutinee so subsequent match arms see a clean
                // stack. The previous shape
                //   Dup; TestVariant; JumpIfNot(fail);
                // left `[scrut]` on the stack at the fail target,
                // poisoning later arms — e.g. a wildcard `_` arm
                // whose body referenced an unrelated value would
                // pop the leaked scrutinee instead of its own value.
                //
                // New shape: branch on success, fall through to a
                // failure cleanup that pops the dup'd scrutinee
                // before jumping. The registered fail-jump is an
                // unconditional `Op::Jump`; `compile_match`'s patch
                // loop accepts both `JumpIfNot` and `Jump`.
                self.emit(Op::Dup);                   // [scrut, scrut]
                self.emit(Op::TestVariant(name_idx)); // [scrut, Bool]
                let j_success = self.code.len();
                self.emit(Op::JumpIf(0));             // pop Bool. success → [scrut]
                self.emit(Op::Pop);                   // failure cleanup: [scrut] → []
                let j_fail = self.code.len();
                self.emit(Op::Jump(0));               // → fail target with []
                fails.push(j_fail);
                let success_target = self.code.len() as i32;
                if let Op::JumpIf(off) = &mut self.code[j_success] {
                    *off = success_target - (j_success as i32 + 1);
                }
                if args.is_empty() {
                    // Nullary constructor: nothing to destructure.
                    self.emit(Op::Pop);
                } else if args.len() == 1 {
                    // Single payload: test it directly on the stack.
                    self.emit(Op::GetVariantArg(0));
                    let sub_fails = self.compile_pattern_test(&args[0], bindings);
                    fails.extend(sub_fails);
                } else {
                    // Multiple payloads: spill the variant to a local and
                    // reload it for each positional sub-pattern.
                    let slot = self.alloc_local("__variant");
                    self.emit(Op::StoreLocal(slot));
                    for (i, arg) in args.iter().enumerate() {
                        self.emit(Op::LoadLocal(slot));
                        self.emit(Op::GetVariantArg(i as u16));
                        let sub_fails = self.compile_pattern_test(arg, bindings);
                        fails.extend(sub_fails);
                    }
                }
            }
            // Record: spill to a local, then test each listed field's
            // sub-pattern against `GetField` of that local.
            a::Pattern::PRecord { fields } => {
                let slot = self.alloc_local("__record");
                self.emit(Op::StoreLocal(slot));
                for f in fields {
                    self.emit(Op::LoadLocal(slot));
                    let fi = self.pool.field(&f.name);
                    self.emit(Op::GetField(fi));
                    let sub_fails = self.compile_pattern_test(&f.pattern, bindings);
                    fails.extend(sub_fails);
                }
            }
            // Tuple: same shape as PRecord, indexing with GetElem.
            a::Pattern::PTuple { items } => {
                let slot = self.alloc_local("__tuple");
                self.emit(Op::StoreLocal(slot));
                for (i, item) in items.iter().enumerate() {
                    self.emit(Op::LoadLocal(slot));
                    self.emit(Op::GetElem(i as u16));
                    let sub_fails = self.compile_pattern_test(item, bindings);
                    fails.extend(sub_fails);
                }
            }
        }
        fails
    }
672
673    /// Compile a Lambda: collect free variables that resolve to outer-scope
674    /// locals, register a synthetic function, emit MakeClosure with the
675    /// captured values pushed in order.
676    fn compile_lambda(&mut self, params: &[a::Param], body: &a::CExpr) {
677        // Free vars = vars referenced in body that aren't bound locally.
678        let mut bound: std::collections::HashSet<String> = params.iter().map(|p| p.name.clone()).collect();
679        let mut frees: Vec<String> = Vec::new();
680        free_vars(body, &mut bound, &mut frees);
681
682        // Filter to those that are in the enclosing locals (captures).
683        // Don't exclude names that *also* exist in `function_names`:
684        // if the name is in `locals`, the local shadows the global
685        // within this scope, and the lambda needs to capture the
686        // local's value, not the global fn. (#339) Names that are
687        // ONLY in `function_names` (no local) stay external — the
688        // lambda's body resolves them at call time, same as the
689        // enclosing fn would.
690        let captures: Vec<String> = frees.into_iter()
691            .filter(|n| self.locals.contains_key(n))
692            .collect();
693
694        // Allocate a fresh fn_id by appending a placeholder Function.
695        let fn_id = self.next_fn_id.len() as u32;
696        self.next_fn_id.push(Function {
697            name: format!("__lambda_{fn_id}"),
698            arity: (captures.len() + params.len()) as u16,
699            locals_count: 0,
700            code: Vec::new(),
701            effects: Vec::new(),
702            // See #222: filled in at the end of the compile pass.
703            body_hash: crate::program::ZERO_BODY_HASH,
704            // Lambdas don't carry refinements at the surface today
705            // (closure params don't accept `Type{x | ...}` syntax in
706            // the parser). #209 stays focused on top-level fn decls;
707            // closure-param refinements are a follow-up.
708            refinements: Vec::new(),
709        });
710
711        // Emit code at the lambda site: load each captured local, then MakeClosure.
712        for c in &captures {
713            let slot = *self.locals.get(c).expect("free var must be in scope");
714            self.emit(Op::LoadLocal(slot));
715        }
716        self.emit(Op::MakeClosure { fn_id, capture_count: captures.len() as u16 });
717
718        // Queue the body for later compilation.
719        self.pending_lambdas.push(PendingLambda {
720            fn_id,
721            capture_names: captures,
722            params: params.to_vec(),
723            body: body.clone(),
724        });
725    }
726
727    /// Higher-order stdlib ops on Result/Option whose function arg is a
728    /// closure. Emit inline: pattern-match on the variant, invoke the
729    /// closure when applicable, return wrapped result.
730    fn try_emit_higher_order(
731        &mut self,
732        module: &str,
733        op: &str,
734        args: &[a::CExpr],
735        node_id_idx: u32,
736    ) -> bool {
737        match (module, op) {
738            ("result", "map") => self.emit_variant_map(args, "Ok", true),
739            ("result", "and_then") => self.emit_variant_map(args, "Ok", false),
740            ("result", "map_err") => self.emit_variant_map(args, "Err", true),
741            ("result", "or_else") => self.emit_variant_or_else(args, "Err", 1),
742            ("option", "map") => self.emit_variant_map(args, "Some", true),
743            ("option", "and_then") => self.emit_variant_map(args, "Some", false),
744            ("option", "or_else") => self.emit_variant_or_else(args, "None", 0),
745            ("option", "unwrap_or_else") => self.emit_option_unwrap_or_else(args),
746            ("list", "map") => self.emit_list_map(args),
747            ("list", "par_map") => self.emit_list_par_map(args),
748            ("list", "sort_by") => self.emit_list_sort_by(args),
749            ("list", "filter") => self.emit_list_filter(args),
750            ("list", "fold") => self.emit_list_fold(args),
751            ("iter", "from_list") => self.emit_iter_from_list(args),
752            ("iter", "unfold")    => self.emit_iter_unfold(args),
753            ("iter", "next")      => self.emit_iter_next(args),
754            ("iter", "is_empty")  => self.emit_iter_is_empty(args),
755            ("iter", "count")     => self.emit_iter_count(args),
756            ("iter", "take")      => self.emit_iter_take(args),
757            ("iter", "skip")      => self.emit_iter_skip(args),
758            ("iter", "to_list")   => self.emit_iter_to_list(args),
759            ("iter", "map")       => self.emit_iter_map(args),
760            ("iter", "filter")    => self.emit_iter_filter(args),
761            ("iter", "fold")      => self.emit_iter_fold(args),
762            ("map", "fold") => self.emit_map_fold(args, node_id_idx),
763            ("flow", "sequential") => self.emit_flow_sequential(args),
764            ("flow", "branch") => self.emit_flow_branch(args),
765            ("flow", "retry") => self.emit_flow_retry(args),
766            ("flow", "retry_with_backoff") => self.emit_flow_retry_with_backoff(args),
767            ("flow", "parallel") => self.emit_flow_parallel(args),
768            ("flow", "parallel_list") => self.emit_flow_parallel_list(args),
769            _ => return false,
770        }
771        true
772    }
773
774    /// `list.map(xs, f)` — inline loop applying `f` to each element.
775    /// Stack contract: pushes the resulting List.
776    fn emit_list_map(&mut self, args: &[a::CExpr]) {
777        // Compile xs and f, store both as locals.
778        self.compile_expr(&args[0], false);
779        let xs = self.alloc_local("__lm_xs");
780        self.emit(Op::StoreLocal(xs));
781        self.compile_expr(&args[1], false);
782        let f = self.alloc_local("__lm_f");
783        self.emit(Op::StoreLocal(f));
784
785        // out := []
786        self.emit(Op::MakeList(0));
787        let out = self.alloc_local("__lm_out");
788        self.emit(Op::StoreLocal(out));
789
790        // i := 0
791        let zero = self.pool.int(0);
792        self.emit(Op::PushConst(zero));
793        let i = self.alloc_local("__lm_i");
794        self.emit(Op::StoreLocal(i));
795
796        // loop_top: while i < len(xs) { ... }
797        let loop_top = self.code.len();
798        self.emit(Op::LoadLocal(i));
799        self.emit(Op::LoadLocal(xs));
800        self.emit(Op::GetListLen);
801        self.emit(Op::NumLt);
802        let j_exit = self.code.len();
803        self.emit(Op::JumpIfNot(0));
804
805        // body: out := out ++ [f(xs[i])]
806        let nid = self.pool.node_id("n_list_map");
807        self.emit(Op::LoadLocal(out));
808        self.emit(Op::LoadLocal(f));
809        self.emit(Op::LoadLocal(xs));
810        self.emit(Op::LoadLocal(i));
811        self.emit(Op::GetListElemDyn);
812        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
813        self.emit(Op::ListAppend);
814        self.emit(Op::StoreLocal(out));
815
816        // i := i + 1
817        self.emit(Op::LoadLocal(i));
818        let one = self.pool.int(1);
819        self.emit(Op::PushConst(one));
820        self.emit(Op::NumAdd);
821        self.emit(Op::StoreLocal(i));
822
823        // jump back
824        let jump_back = self.code.len();
825        let back = (loop_top as i32) - (jump_back as i32 + 1);
826        self.emit(Op::Jump(back));
827
828        // exit: patch j_exit, push out
829        let exit_target = self.code.len() as i32;
830        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
831            *off = exit_target - (j_exit as i32 + 1);
832        }
833        self.emit(Op::LoadLocal(out));
834    }
835
836    /// `list.par_map(xs, f)` (#305 slice 1). Pushes `xs` and `f`,
837    /// then emits a single `Op::ParallelMap` — the VM applies `f`
838    /// to each element on OS-thread tasks, capped by
839    /// `LEX_PAR_MAX_CONCURRENCY`. Returns the result list in input
840    /// order.
841    fn emit_list_par_map(&mut self, args: &[a::CExpr]) {
842        self.compile_expr(&args[0], false);
843        self.compile_expr(&args[1], false);
844        let nid = self.pool.node_id("n_list_par_map");
845        self.emit(Op::ParallelMap { node_id_idx: nid });
846    }
847
848    /// `list.sort_by(xs, f)` (#338). Pushes `xs` and the key-fn
849    /// `f`, then emits a single `Op::SortByKey` — the VM invokes
850    /// `f` on each element to derive a sortable key, stable-sorts
851    /// by key, and returns the values in sorted order. Keys must
852    /// resolve to `Int` / `Float` / `Str`; mixed-type pairs are
853    /// treated as equal by the comparator (preserving insertion
854    /// order via the stable sort).
855    fn emit_list_sort_by(&mut self, args: &[a::CExpr]) {
856        self.compile_expr(&args[0], false);
857        self.compile_expr(&args[1], false);
858        let nid = self.pool.node_id("n_list_sort_by");
859        self.emit(Op::SortByKey { node_id_idx: nid });
860    }
861
862    /// `list.filter(xs, pred)` — keep elements where pred returns true.
863    fn emit_list_filter(&mut self, args: &[a::CExpr]) {
864        self.compile_expr(&args[0], false);
865        let xs = self.alloc_local("__lf_xs");
866        self.emit(Op::StoreLocal(xs));
867        self.compile_expr(&args[1], false);
868        let f = self.alloc_local("__lf_f");
869        self.emit(Op::StoreLocal(f));
870
871        self.emit(Op::MakeList(0));
872        let out = self.alloc_local("__lf_out");
873        self.emit(Op::StoreLocal(out));
874
875        let zero = self.pool.int(0);
876        self.emit(Op::PushConst(zero));
877        let i = self.alloc_local("__lf_i");
878        self.emit(Op::StoreLocal(i));
879
880        let loop_top = self.code.len();
881        self.emit(Op::LoadLocal(i));
882        self.emit(Op::LoadLocal(xs));
883        self.emit(Op::GetListLen);
884        self.emit(Op::NumLt);
885        let j_exit = self.code.len();
886        self.emit(Op::JumpIfNot(0));
887
888        // x := xs[i]
889        self.emit(Op::LoadLocal(xs));
890        self.emit(Op::LoadLocal(i));
891        self.emit(Op::GetListElemDyn);
892        let x = self.alloc_local("__lf_x");
893        self.emit(Op::StoreLocal(x));
894
895        // if pred(x) { out := out ++ [x] }
896        let nid = self.pool.node_id("n_list_filter");
897        self.emit(Op::LoadLocal(f));
898        self.emit(Op::LoadLocal(x));
899        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
900        let j_skip = self.code.len();
901        self.emit(Op::JumpIfNot(0));
902
903        self.emit(Op::LoadLocal(out));
904        self.emit(Op::LoadLocal(x));
905        self.emit(Op::ListAppend);
906        self.emit(Op::StoreLocal(out));
907
908        let skip_target = self.code.len() as i32;
909        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
910            *off = skip_target - (j_skip as i32 + 1);
911        }
912
913        // i := i + 1
914        self.emit(Op::LoadLocal(i));
915        let one = self.pool.int(1);
916        self.emit(Op::PushConst(one));
917        self.emit(Op::NumAdd);
918        self.emit(Op::StoreLocal(i));
919
920        let jump_back = self.code.len();
921        let back = (loop_top as i32) - (jump_back as i32 + 1);
922        self.emit(Op::Jump(back));
923
924        let exit_target = self.code.len() as i32;
925        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
926            *off = exit_target - (j_exit as i32 + 1);
927        }
928        self.emit(Op::LoadLocal(out));
929    }
930
931    /// `list.fold(xs, init, f)` — left fold with two-arg combiner.
932    fn emit_list_fold(&mut self, args: &[a::CExpr]) {
933        // args: xs, init, f
934        self.compile_expr(&args[0], false);
935        let xs = self.alloc_local("__lo_xs");
936        self.emit(Op::StoreLocal(xs));
937        self.compile_expr(&args[1], false);
938        let acc = self.alloc_local("__lo_acc");
939        self.emit(Op::StoreLocal(acc));
940        self.compile_expr(&args[2], false);
941        let f = self.alloc_local("__lo_f");
942        self.emit(Op::StoreLocal(f));
943
944        let zero = self.pool.int(0);
945        self.emit(Op::PushConst(zero));
946        let i = self.alloc_local("__lo_i");
947        self.emit(Op::StoreLocal(i));
948
949        let loop_top = self.code.len();
950        self.emit(Op::LoadLocal(i));
951        self.emit(Op::LoadLocal(xs));
952        self.emit(Op::GetListLen);
953        self.emit(Op::NumLt);
954        let j_exit = self.code.len();
955        self.emit(Op::JumpIfNot(0));
956
957        // acc := f(acc, xs[i])
958        let nid = self.pool.node_id("n_list_fold");
959        self.emit(Op::LoadLocal(f));
960        self.emit(Op::LoadLocal(acc));
961        self.emit(Op::LoadLocal(xs));
962        self.emit(Op::LoadLocal(i));
963        self.emit(Op::GetListElemDyn);
964        self.emit(Op::CallClosure { arity: 2, node_id_idx: nid });
965        self.emit(Op::StoreLocal(acc));
966
967        // i := i + 1
968        self.emit(Op::LoadLocal(i));
969        let one = self.pool.int(1);
970        self.emit(Op::PushConst(one));
971        self.emit(Op::NumAdd);
972        self.emit(Op::StoreLocal(i));
973
974        let jump_back = self.code.len();
975        let back = (loop_top as i32) - (jump_back as i32 + 1);
976        self.emit(Op::Jump(back));
977
978        let exit_target = self.code.len() as i32;
979        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
980            *off = exit_target - (j_exit as i32 + 1);
981        }
982        self.emit(Op::LoadLocal(acc));
983    }
984
985    // ── Iter[T] operations (#364) ─────────────────────────────────────────
986    // Internal representation: `Value::Variant("__IterEager", [list, idx])`
987    // for the eager form (a List backing store + Int cursor) and
988    // `Value::Variant("__IterLazy", [seed, step_closure])` for the lazy form
989    // produced by `iter.unfold` (#376). Both are tagged variants so each op
990    // can `TestVariant` at runtime to dispatch. The names start with `__` so
991    // they can't be written by user code (uppercase ASCII-letter is required
992    // for constructor names, and the underscores keep them out of the
993    // user-namespace by convention).
994
995    /// `iter.from_list(xs)` — wrap a list in an eager iterator at position 0.
996    fn emit_iter_from_list(&mut self, args: &[a::CExpr]) {
997        self.compile_expr(&args[0], false);
998        let zero = self.pool.int(0);
999        self.emit(Op::PushConst(zero));
1000        let v = self.pool.variant("__IterEager");
1001        self.emit(Op::MakeVariant { name_idx: v, arity: 2 });
1002    }
1003
    /// `iter.next(it)` — advance one step; returns `Option[(T, Iter[T])]`.
    ///
    /// Dispatches on the iter's variant tag:
    /// - `__IterLazy(seed, step)` (#376) → invoke `step(seed)`. On
    ///   `Some((t, s'))` wrap as `Some((t, __IterLazy(s', step)))`; on
    ///   `None` propagate `None`. The seed advances forward each call.
    /// - `__IterCursor(handle)` (#379) → effect-call `sql.cursor_next(handle)`
    ///   which returns `Option[T]`. On `Some(row)` wrap as
    ///   `Some((row, __IterCursor(handle)))`; on `None` propagate. Handle
    ///   stays stable across calls — state is server-side / mpsc-buffered.
    /// - `__IterEager(list, idx)` → existing positional cursor.
    ///
    /// Jump-offset convention throughout: offsets are relative to the
    /// instruction *after* the jump, hence the `- (site + 1)` in every
    /// patch below. All three arms converge at a single exit point.
    fn emit_iter_next(&mut self, args: &[a::CExpr]) {
        // Evaluate the iterator expression once; every arm re-reads it
        // from this local.
        self.compile_expr(&args[0], false);
        let it = self.alloc_local("__in_it");
        self.emit(Op::StoreLocal(it));

        // Dispatch: TestVariant pops; we Dup to keep the iter around.
        // NOTE(review): if TestVariant really pops its operand (as the
        // line above says), the Dup'd copy appears to be left on the
        // stack in every arm — compare `emit_iter_to_list`, which
        // dispatches with LoadLocal + TestVariant and no Dup. Confirm
        // against the VM whether TestVariant peeks or pops; one of the
        // two emitters seems to leak a stack slot per dispatch.
        self.emit(Op::LoadLocal(it));
        self.emit(Op::Dup);
        let lazy_name = self.pool.variant("__IterLazy");
        self.emit(Op::TestVariant(lazy_name));
        let j_to_check_cursor = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // ── lazy path ────────────────────────────────────────────────
        // The Dup'd iter is on stack but we've consumed it via TestVariant,
        // so reload from the local.
        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(0)); // seed
        let seed = self.alloc_local("__in_seed");
        self.emit(Op::StoreLocal(seed));

        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(1)); // step closure
        let step = self.alloc_local("__in_step");
        self.emit(Op::StoreLocal(step));

        // Call step(seed) → Option[(T, S)].
        let nid_lazy = self.pool.node_id("n_iter_next_lazy");
        self.emit(Op::LoadLocal(step));
        self.emit(Op::LoadLocal(seed));
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid_lazy });
        let opt = self.alloc_local("__in_opt");
        self.emit(Op::StoreLocal(opt));

        // If `step` returned None, propagate it directly.
        self.emit(Op::LoadLocal(opt));
        let some_name = self.pool.variant("Some");
        self.emit(Op::TestVariant(some_name));
        let j_lazy_none = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some((t, new_seed)) — extract the inner tuple, repackage as
        // Some((t, __IterLazy(new_seed, step))) so the next call advances.
        self.emit(Op::LoadLocal(opt));
        self.emit(Op::GetVariantArg(0));     // (t, new_seed)
        let pair = self.alloc_local("__in_pair");
        self.emit(Op::StoreLocal(pair));

        self.emit(Op::LoadLocal(pair));
        self.emit(Op::GetElem(0));           // t
        self.emit(Op::LoadLocal(pair));
        self.emit(Op::GetElem(1));           // new_seed
        self.emit(Op::LoadLocal(step));      // step closure
        let lazy_v = self.pool.variant("__IterLazy");
        self.emit(Op::MakeVariant { name_idx: lazy_v, arity: 2 }); // __IterLazy(new_seed, step)
        self.emit(Op::MakeTuple(2));         // (t, new_iter)
        let some_v = self.pool.variant("Some");
        self.emit(Op::MakeVariant { name_idx: some_v, arity: 1 });
        let j_after_lazy = self.code.len();
        self.emit(Op::Jump(0));

        // Lazy → None: just forward the None.
        let none_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_lazy_none] {
            *off = none_t - (j_lazy_none as i32 + 1);
        }
        let none_v = self.pool.variant("None");
        self.emit(Op::MakeVariant { name_idx: none_v, arity: 0 });
        let j_after_lazy_none = self.code.len();
        self.emit(Op::Jump(0));

        // ── cursor path (#379) ───────────────────────────────────────
        // The not-lazy branch of the first dispatch lands here and
        // re-tests for the cursor tag.
        let cursor_check_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_to_check_cursor] {
            *off = cursor_check_t - (j_to_check_cursor as i32 + 1);
        }

        self.emit(Op::LoadLocal(it));
        self.emit(Op::Dup);
        let cursor_name = self.pool.variant("__IterCursor");
        self.emit(Op::TestVariant(cursor_name));
        let j_to_eager = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Cursor path: extract handle, effect-call sql.cursor_next(handle).
        // The handler returns Option[T] directly. We then wrap as
        // Some((T, __IterCursor(handle))) or forward None.
        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(0));     // handle
        let handle = self.alloc_local("__in_handle");
        self.emit(Op::StoreLocal(handle));

        let kind_idx = self.pool.str("sql");
        let op_idx = self.pool.str("cursor_next");
        let nid_cursor = self.pool.node_id("n_iter_next_cursor");
        self.emit(Op::LoadLocal(handle));
        self.emit(Op::EffectCall {
            kind_idx,
            op_idx,
            arity: 1,
            node_id_idx: nid_cursor,
        });
        let cur_opt = self.alloc_local("__in_cur_opt");
        self.emit(Op::StoreLocal(cur_opt));

        self.emit(Op::LoadLocal(cur_opt));
        let some_c = self.pool.variant("Some");
        self.emit(Op::TestVariant(some_c));
        let j_cursor_none = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some(row): build Some((row, __IterCursor(handle)))
        // The same handle is re-wrapped — cursor state advances server-side.
        self.emit(Op::LoadLocal(cur_opt));
        self.emit(Op::GetVariantArg(0));     // row
        self.emit(Op::LoadLocal(handle));
        let cursor_v = self.pool.variant("__IterCursor");
        self.emit(Op::MakeVariant { name_idx: cursor_v, arity: 1 });
        self.emit(Op::MakeTuple(2));         // (row, __IterCursor(handle))
        let some_c2 = self.pool.variant("Some");
        self.emit(Op::MakeVariant { name_idx: some_c2, arity: 1 });
        let j_after_cursor = self.code.len();
        self.emit(Op::Jump(0));

        // Cursor → None
        let cursor_none_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_cursor_none] {
            *off = cursor_none_t - (j_cursor_none as i32 + 1);
        }
        let none_c = self.pool.variant("None");
        self.emit(Op::MakeVariant { name_idx: none_c, arity: 0 });
        let j_after_cursor_none = self.code.len();
        self.emit(Op::Jump(0));

        // ── eager path ───────────────────────────────────────────────
        // Neither lazy nor cursor: fall through to the positional cursor.
        let eager_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_to_eager] {
            *off = eager_t - (j_to_eager as i32 + 1);
        }

        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__in_list");
        self.emit(Op::StoreLocal(list));

        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(1));
        let idx = self.alloc_local("__in_idx");
        self.emit(Op::StoreLocal(idx));

        // if idx < len(list)
        self.emit(Op::LoadLocal(idx));
        self.emit(Op::LoadLocal(list));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_eager_else = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some((item, __IterEager(list, idx+1)))
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(idx));
        self.emit(Op::GetListElemDyn);

        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(idx));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        let eager_v = self.pool.variant("__IterEager");
        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
        self.emit(Op::MakeTuple(2));
        let some_e = self.pool.variant("Some");
        self.emit(Op::MakeVariant { name_idx: some_e, arity: 1 });
        let j_after_eager = self.code.len();
        self.emit(Op::Jump(0));

        // Eager → None
        let eager_none_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_eager_else] {
            *off = eager_none_t - (j_eager_else as i32 + 1);
        }
        let none_e = self.pool.variant("None");
        self.emit(Op::MakeVariant { name_idx: none_e, arity: 0 });

        // Converge all paths: patch the five forward jumps to the
        // common exit, which follows the eager-None fallthrough.
        let end = self.code.len() as i32;
        if let Op::Jump(off) = &mut self.code[j_after_lazy] {
            *off = end - (j_after_lazy as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_lazy_none] {
            *off = end - (j_after_lazy_none as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_cursor] {
            *off = end - (j_after_cursor as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_cursor_none] {
            *off = end - (j_after_cursor_none as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_eager] {
            *off = end - (j_after_eager as i32 + 1);
        }
    }
1216
1217    /// `iter.unfold(seed, step)` — lazy iterator that calls `step(seed)` on
1218    /// each `iter.next` and threads the new seed forward. Internal value
1219    /// shape: `__IterLazy(seed, step)`. Step has type `(S) -> Option[(T, S)]`;
1220    /// returning `None` ends the iteration (#376).
1221    fn emit_iter_unfold(&mut self, args: &[a::CExpr]) {
1222        self.compile_expr(&args[0], false); // seed
1223        self.compile_expr(&args[1], false); // step
1224        let lazy = self.pool.variant("__IterLazy");
1225        self.emit(Op::MakeVariant { name_idx: lazy, arity: 2 });
1226    }
1227
1228    /// `iter.is_empty(it)` — true iff no further element. v1 supports the
1229    /// eager form O(1); on a lazy iter the seed sits in slot 0 and is not a
1230    /// List, so the VM trips on `GetListLen` rather than returning a wrong
1231    /// answer. Callers needing lazy support should materialize with
1232    /// `iter.to_list` first or call `iter.next` and pattern-match.
1233    fn emit_iter_is_empty(&mut self, args: &[a::CExpr]) {
1234        self.compile_expr(&args[0], false);
1235        let it = self.alloc_local("__ie_it");
1236        self.emit(Op::StoreLocal(it));
1237
1238        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1)); // idx
1239        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0)); // list
1240        self.emit(Op::GetListLen);                                     // len
1241        self.emit(Op::NumLt);                                          // idx < len
1242        self.emit(Op::BoolNot);                                        // NOT(idx < len)
1243    }
1244
1245    /// `iter.count(it)` — number of remaining elements (v1: eager-only).
1246    fn emit_iter_count(&mut self, args: &[a::CExpr]) {
1247        self.compile_expr(&args[0], false);
1248        let it = self.alloc_local("__ic_it");
1249        self.emit(Op::StoreLocal(it));
1250
1251        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1252        self.emit(Op::GetListLen);                                     // len
1253        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1)); // idx
1254        self.emit(Op::NumSub);                                         // len - idx
1255    }
1256
1257    /// `iter.take(it, n)` — collect up to n elements, return as new Iter.
1258    fn emit_iter_take(&mut self, args: &[a::CExpr]) {
1259        self.compile_expr(&args[0], false);
1260        let it   = self.alloc_local("__itk_it");
1261        self.emit(Op::StoreLocal(it));
1262
1263        self.compile_expr(&args[1], false);
1264        let n    = self.alloc_local("__itk_n");
1265        self.emit(Op::StoreLocal(n));
1266
1267        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1268        let list = self.alloc_local("__itk_list");
1269        self.emit(Op::StoreLocal(list));
1270
1271        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
1272        let i    = self.alloc_local("__itk_i");
1273        self.emit(Op::StoreLocal(i));
1274
1275        self.emit(Op::MakeList(0));
1276        let out  = self.alloc_local("__itk_out");
1277        self.emit(Op::StoreLocal(out));
1278
1279        let zero = self.pool.int(0);
1280        self.emit(Op::PushConst(zero));
1281        let cnt  = self.alloc_local("__itk_cnt");
1282        self.emit(Op::StoreLocal(cnt));
1283
1284        let loop_top = self.code.len();
1285
1286        // while cnt < n
1287        self.emit(Op::LoadLocal(cnt));
1288        self.emit(Op::LoadLocal(n));
1289        self.emit(Op::NumLt);
1290        let j_exit_n = self.code.len();
1291        self.emit(Op::JumpIfNot(0));
1292
1293        // AND i < len(list)
1294        self.emit(Op::LoadLocal(i));
1295        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1296        self.emit(Op::NumLt);
1297        let j_exit_l = self.code.len();
1298        self.emit(Op::JumpIfNot(0));
1299
1300        // out = out ++ [list[i]]
1301        self.emit(Op::LoadLocal(out));
1302        self.emit(Op::LoadLocal(list));
1303        self.emit(Op::LoadLocal(i));
1304        self.emit(Op::GetListElemDyn);
1305        self.emit(Op::ListAppend);
1306        self.emit(Op::StoreLocal(out));
1307
1308        let one = self.pool.int(1);
1309        // i = i + 1
1310        self.emit(Op::LoadLocal(i));
1311        self.emit(Op::PushConst(one));
1312        self.emit(Op::NumAdd);
1313        self.emit(Op::StoreLocal(i));
1314        // cnt = cnt + 1
1315        self.emit(Op::LoadLocal(cnt));
1316        self.emit(Op::PushConst(one));
1317        self.emit(Op::NumAdd);
1318        self.emit(Op::StoreLocal(cnt));
1319
1320        let jback = self.code.len();
1321        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));
1322
1323        let exit_t = self.code.len() as i32;
1324        if let Op::JumpIfNot(off) = &mut self.code[j_exit_n] { *off = exit_t - (j_exit_n as i32 + 1); }
1325        if let Op::JumpIfNot(off) = &mut self.code[j_exit_l] { *off = exit_t - (j_exit_l as i32 + 1); }
1326
1327        // return new __IterEager(out, 0)
1328        self.emit(Op::LoadLocal(out));
1329        self.emit(Op::PushConst(zero));
1330        let eager_v = self.pool.variant("__IterEager");
1331        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
1332    }
1333
1334    /// `iter.skip(it, n)` — advance cursor by n (or to end), return new Iter.
1335    fn emit_iter_skip(&mut self, args: &[a::CExpr]) {
1336        self.compile_expr(&args[0], false);
1337        let it   = self.alloc_local("__isk_it");
1338        self.emit(Op::StoreLocal(it));
1339
1340        self.compile_expr(&args[1], false);
1341        let n    = self.alloc_local("__isk_n");
1342        self.emit(Op::StoreLocal(n));
1343
1344        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1345        let list = self.alloc_local("__isk_list");
1346        self.emit(Op::StoreLocal(list));
1347
1348        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
1349        let idx  = self.alloc_local("__isk_idx");
1350        self.emit(Op::StoreLocal(idx));
1351
1352        // raw = idx + n
1353        self.emit(Op::LoadLocal(idx));
1354        self.emit(Op::LoadLocal(n));
1355        self.emit(Op::NumAdd);
1356        let raw  = self.alloc_local("__isk_raw");
1357        self.emit(Op::StoreLocal(raw));
1358
1359        // new_idx = if raw < len then raw else len
1360        self.emit(Op::LoadLocal(raw));
1361        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1362        self.emit(Op::NumLt);
1363        let j_use_raw = self.code.len();
1364        self.emit(Op::JumpIf(0));
1365
1366        // use len
1367        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1368        let j_end = self.code.len();
1369        self.emit(Op::Jump(0));
1370
1371        // use raw
1372        let raw_t = self.code.len() as i32;
1373        if let Op::JumpIf(off) = &mut self.code[j_use_raw] { *off = raw_t - (j_use_raw as i32 + 1); }
1374        self.emit(Op::LoadLocal(raw));
1375
1376        let end_t = self.code.len() as i32;
1377        if let Op::Jump(off) = &mut self.code[j_end] { *off = end_t - (j_end as i32 + 1); }
1378
1379        // new_idx on stack; build new __IterEager(list, new_idx)
1380        let new_idx = self.alloc_local("__isk_ni");
1381        self.emit(Op::StoreLocal(new_idx));
1382        self.emit(Op::LoadLocal(list));
1383        self.emit(Op::LoadLocal(new_idx));
1384        let eager_v = self.pool.variant("__IterEager");
1385        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
1386    }
1387
    /// `iter.to_list(it)` — materialise remaining elements into a List.
    ///
    /// Dispatches on the iter variant (#376):
    /// - `__IterLazy`: repeatedly call `step(seed)`; on `Some((t, s'))` append
    ///   `t` and continue with `s'`; on `None` stop. May hang on truly
    ///   infinite producers — that's documented as a v1 limitation, the
    ///   step-limit-protected caller is what catches misuse.
    /// - `__IterEager`: slice the backing list from `idx` onward (O(n) walk).
    ///
    /// Forward jumps are emitted with a placeholder offset of 0 and patched
    /// once their target is known; offsets are relative to the instruction
    /// *after* the jump (hence the `+ 1` in every patch computation).
    fn emit_iter_to_list(&mut self, args: &[a::CExpr]) {
        // it := the iterator value (args[0]); both paths re-load it to
        // pull out their variant payloads.
        self.compile_expr(&args[0], false);
        let it = self.alloc_local("__itl_it");
        self.emit(Op::StoreLocal(it));

        // Build the output list up-front, shared across both paths.
        self.emit(Op::MakeList(0));
        let out = self.alloc_local("__itl_out");
        self.emit(Op::StoreLocal(out));

        // Dispatch on variant tag. TestVariant consumes the value and
        // leaves a Bool; JumpIfNot consumes the Bool, so the stack is
        // empty on entry to either path.
        self.emit(Op::LoadLocal(it));
        let lazy_name = self.pool.variant("__IterLazy");
        self.emit(Op::TestVariant(lazy_name));
        let j_to_eager = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // ── lazy path ─────────────────────────────────────────────────
        // seed and step closure live in locals; we update seed each iteration.
        // __IterLazy payload layout here: arg 0 = seed, arg 1 = step closure.
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let seed = self.alloc_local("__itl_seed");
        self.emit(Op::StoreLocal(seed));

        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let step = self.alloc_local("__itl_step");
        self.emit(Op::StoreLocal(step));

        // loop: opt := step(seed)
        let lazy_loop = self.code.len();
        let nid_lazy = self.pool.node_id("n_iter_to_list_lazy");
        self.emit(Op::LoadLocal(step));
        self.emit(Op::LoadLocal(seed));
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid_lazy });
        let opt = self.alloc_local("__itl_opt");
        self.emit(Op::StoreLocal(opt));

        // If None, drop out of the lazy loop.
        self.emit(Op::LoadLocal(opt));
        let some_name = self.pool.variant("Some");
        self.emit(Op::TestVariant(some_name));
        let j_lazy_exit = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some((t, new_seed)): append t to out, replace seed.
        self.emit(Op::LoadLocal(opt));
        self.emit(Op::GetVariantArg(0));
        let pair = self.alloc_local("__itl_pair");
        self.emit(Op::StoreLocal(pair));

        // out := out ++ [pair.0]
        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(pair)); self.emit(Op::GetElem(0));
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // seed := pair.1
        self.emit(Op::LoadLocal(pair)); self.emit(Op::GetElem(1));
        self.emit(Op::StoreLocal(seed));

        // Backward jump to the top of the lazy loop.
        let jback_lazy = self.code.len();
        self.emit(Op::Jump((lazy_loop as i32) - (jback_lazy as i32 + 1)));

        // Patch the None-exit jump to land here; then skip over the
        // eager path to the convergence point (patched at the end).
        let lazy_exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_lazy_exit] {
            *off = lazy_exit_t - (j_lazy_exit as i32 + 1);
        }
        let j_after_lazy = self.code.len();
        self.emit(Op::Jump(0));

        // ── eager path ────────────────────────────────────────────────
        // Patch the dispatch jump: non-lazy iterators land here.
        let eager_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_to_eager] {
            *off = eager_t - (j_to_eager as i32 + 1);
        }

        // __IterEager payload layout here: arg 0 = backing list, arg 1 = cursor.
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__itl_list");
        self.emit(Op::StoreLocal(list));

        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let i = self.alloc_local("__itl_i");
        self.emit(Op::StoreLocal(i));

        // while i < len(list): out := out ++ [list[i]]; i := i + 1
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0));

        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        let jback = self.code.len();
        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));

        // Patch the eager loop's exit jump.
        let exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_t - (j_exit as i32 + 1);
        }

        // Converge: lazy path falls through here too.
        let converge = self.code.len() as i32;
        if let Op::Jump(off) = &mut self.code[j_after_lazy] {
            *off = converge - (j_after_lazy as i32 + 1);
        }
        self.emit(Op::LoadLocal(out));
    }
1511
1512    /// `iter.map(it, f)` — apply `f` to each remaining element; returns new Iter.
1513    fn emit_iter_map(&mut self, args: &[a::CExpr]) {
1514        self.compile_expr(&args[0], false);
1515        let it   = self.alloc_local("__im_it");
1516        self.emit(Op::StoreLocal(it));
1517
1518        self.compile_expr(&args[1], false);
1519        let f    = self.alloc_local("__im_f");
1520        self.emit(Op::StoreLocal(f));
1521
1522        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1523        let list = self.alloc_local("__im_list");
1524        self.emit(Op::StoreLocal(list));
1525
1526        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
1527        let i    = self.alloc_local("__im_i");
1528        self.emit(Op::StoreLocal(i));
1529
1530        self.emit(Op::MakeList(0));
1531        let out  = self.alloc_local("__im_out");
1532        self.emit(Op::StoreLocal(out));
1533
1534        let loop_top = self.code.len();
1535        self.emit(Op::LoadLocal(i));
1536        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1537        self.emit(Op::NumLt);
1538        let j_exit = self.code.len();
1539        self.emit(Op::JumpIfNot(0));
1540
1541        let nid = self.pool.node_id("n_iter_map");
1542        self.emit(Op::LoadLocal(out));
1543        self.emit(Op::LoadLocal(f));
1544        self.emit(Op::LoadLocal(list));
1545        self.emit(Op::LoadLocal(i));
1546        self.emit(Op::GetListElemDyn);
1547        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
1548        self.emit(Op::ListAppend);
1549        self.emit(Op::StoreLocal(out));
1550
1551        self.emit(Op::LoadLocal(i));
1552        let one = self.pool.int(1);
1553        self.emit(Op::PushConst(one));
1554        self.emit(Op::NumAdd);
1555        self.emit(Op::StoreLocal(i));
1556
1557        let jback = self.code.len();
1558        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));
1559
1560        let exit_t = self.code.len() as i32;
1561        if let Op::JumpIfNot(off) = &mut self.code[j_exit] { *off = exit_t - (j_exit as i32 + 1); }
1562
1563        let zero = self.pool.int(0);
1564        self.emit(Op::LoadLocal(out));
1565        self.emit(Op::PushConst(zero));
1566        let eager_v = self.pool.variant("__IterEager");
1567        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
1568    }
1569
    /// `iter.filter(it, pred)` — keep elements where pred is true; returns new Iter.
    ///
    /// NOTE(review): reads the `__IterEager` payload (list, cursor) directly,
    /// with no `__IterLazy` dispatch (unlike `emit_iter_to_list`) — presumably
    /// lazy iters are materialised before reaching here; confirm upstream.
    fn emit_iter_filter(&mut self, args: &[a::CExpr]) {
        // it := iterator, f := predicate closure.
        self.compile_expr(&args[0], false);
        let it   = self.alloc_local("__if_it");
        self.emit(Op::StoreLocal(it));

        self.compile_expr(&args[1], false);
        let f    = self.alloc_local("__if_f");
        self.emit(Op::StoreLocal(f));

        // Unpack payload: arg 0 = backing list, arg 1 = cursor.
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__if_list");
        self.emit(Op::StoreLocal(list));

        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let i    = self.alloc_local("__if_i");
        self.emit(Op::StoreLocal(i));

        // out := []
        self.emit(Op::MakeList(0));
        let out  = self.alloc_local("__if_out");
        self.emit(Op::StoreLocal(out));

        // while i < len(list)
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // elem := list[i]
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        let x    = self.alloc_local("__if_x");
        self.emit(Op::StoreLocal(x));

        // if pred(elem): out := out ++ [elem]
        let nid = self.pool.node_id("n_iter_filter");
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(x));
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
        let j_skip = self.code.len();
        self.emit(Op::JumpIfNot(0));

        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(x));
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // Rejected elements jump straight to the increment below.
        let skip_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_skip] { *off = skip_t - (j_skip as i32 + 1); }

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        let jback = self.code.len();
        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));

        // Patch the loop exit; result is a fresh eager iter over `out` at 0.
        let exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] { *off = exit_t - (j_exit as i32 + 1); }

        let zero = self.pool.int(0);
        self.emit(Op::LoadLocal(out));
        self.emit(Op::PushConst(zero));
        let eager_v = self.pool.variant("__IterEager");
        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
    }
1639
1640    /// `iter.fold(it, init, f)` — left fold over remaining elements.
1641    fn emit_iter_fold(&mut self, args: &[a::CExpr]) {
1642        self.compile_expr(&args[0], false);
1643        let it   = self.alloc_local("__ifo_it");
1644        self.emit(Op::StoreLocal(it));
1645
1646        self.compile_expr(&args[1], false);
1647        let acc  = self.alloc_local("__ifo_acc");
1648        self.emit(Op::StoreLocal(acc));
1649
1650        self.compile_expr(&args[2], false);
1651        let f    = self.alloc_local("__ifo_f");
1652        self.emit(Op::StoreLocal(f));
1653
1654        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1655        let list = self.alloc_local("__ifo_list");
1656        self.emit(Op::StoreLocal(list));
1657
1658        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
1659        let i    = self.alloc_local("__ifo_i");
1660        self.emit(Op::StoreLocal(i));
1661
1662        let loop_top = self.code.len();
1663        self.emit(Op::LoadLocal(i));
1664        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1665        self.emit(Op::NumLt);
1666        let j_exit = self.code.len();
1667        self.emit(Op::JumpIfNot(0));
1668
1669        let nid = self.pool.node_id("n_iter_fold");
1670        self.emit(Op::LoadLocal(f));
1671        self.emit(Op::LoadLocal(acc));
1672        self.emit(Op::LoadLocal(list));
1673        self.emit(Op::LoadLocal(i));
1674        self.emit(Op::GetListElemDyn);
1675        self.emit(Op::CallClosure { arity: 2, node_id_idx: nid });
1676        self.emit(Op::StoreLocal(acc));
1677
1678        self.emit(Op::LoadLocal(i));
1679        let one = self.pool.int(1);
1680        self.emit(Op::PushConst(one));
1681        self.emit(Op::NumAdd);
1682        self.emit(Op::StoreLocal(i));
1683
1684        let jback = self.code.len();
1685        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));
1686
1687        let exit_t = self.code.len() as i32;
1688        if let Op::JumpIfNot(off) = &mut self.code[j_exit] { *off = exit_t - (j_exit as i32 + 1); }
1689        self.emit(Op::LoadLocal(acc));
1690    }
1691
1692    /// `map.fold(m, init, f)` — left fold over `Map[K, V]` entries with a
1693    /// three-arg combiner `f(acc, k, v)`. Iteration order matches
1694    /// `map.entries` (BTreeMap-sorted by key). Materializes the entry
1695    /// list once via the runtime's `("map", "entries")` op, then runs
1696    /// the same inline loop as `list.fold`.
1697    fn emit_map_fold(&mut self, args: &[a::CExpr], node_id_idx: u32) {
1698        // xs := map.entries(m)
1699        self.compile_expr(&args[0], false);
1700        let map_kind = self.pool.str("map");
1701        let entries_op = self.pool.str("entries");
1702        self.emit(Op::EffectCall {
1703            kind_idx: map_kind,
1704            op_idx: entries_op,
1705            arity: 1,
1706            node_id_idx,
1707        });
1708        let xs = self.alloc_local("__mf_xs");
1709        self.emit(Op::StoreLocal(xs));
1710
1711        // acc := init
1712        self.compile_expr(&args[1], false);
1713        let acc = self.alloc_local("__mf_acc");
1714        self.emit(Op::StoreLocal(acc));
1715
1716        // f := <closure>
1717        self.compile_expr(&args[2], false);
1718        let f = self.alloc_local("__mf_f");
1719        self.emit(Op::StoreLocal(f));
1720
1721        // i := 0
1722        let zero = self.pool.int(0);
1723        self.emit(Op::PushConst(zero));
1724        let i = self.alloc_local("__mf_i");
1725        self.emit(Op::StoreLocal(i));
1726
1727        // loop_top: while i < len(xs)
1728        let loop_top = self.code.len();
1729        self.emit(Op::LoadLocal(i));
1730        self.emit(Op::LoadLocal(xs));
1731        self.emit(Op::GetListLen);
1732        self.emit(Op::NumLt);
1733        let j_exit = self.code.len();
1734        self.emit(Op::JumpIfNot(0));
1735
1736        // pair := xs[i]
1737        self.emit(Op::LoadLocal(xs));
1738        self.emit(Op::LoadLocal(i));
1739        self.emit(Op::GetListElemDyn);
1740        let pair = self.alloc_local("__mf_pair");
1741        self.emit(Op::StoreLocal(pair));
1742
1743        // acc := f(acc, pair.0, pair.1)
1744        let nid = self.pool.node_id("n_map_fold");
1745        self.emit(Op::LoadLocal(f));
1746        self.emit(Op::LoadLocal(acc));
1747        self.emit(Op::LoadLocal(pair));
1748        self.emit(Op::GetElem(0));
1749        self.emit(Op::LoadLocal(pair));
1750        self.emit(Op::GetElem(1));
1751        self.emit(Op::CallClosure { arity: 3, node_id_idx: nid });
1752        self.emit(Op::StoreLocal(acc));
1753
1754        // i := i + 1
1755        self.emit(Op::LoadLocal(i));
1756        let one = self.pool.int(1);
1757        self.emit(Op::PushConst(one));
1758        self.emit(Op::NumAdd);
1759        self.emit(Op::StoreLocal(i));
1760
1761        let jump_back = self.code.len();
1762        let back = (loop_top as i32) - (jump_back as i32 + 1);
1763        self.emit(Op::Jump(back));
1764
1765        let exit_target = self.code.len() as i32;
1766        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
1767            *off = exit_target - (j_exit as i32 + 1);
1768        }
1769        self.emit(Op::LoadLocal(acc));
1770    }
1771
    /// Inline pattern: `<module>.map(v, f)` and friends.
    /// `wrap_with`: variant tag whose payload triggers the call (Ok / Some / Err).
    /// `wrap_result`: if true, wrap the closure's result back in `wrap_with`
    /// (map shape); if false, expect the closure to return a wrapped value
    /// itself (and_then shape).
    fn emit_variant_map(
        &mut self,
        args: &[a::CExpr],
        wrap_with: &str,
        wrap_result: bool,
    ) {
        // args[0] = the wrapped value (Result/Option), args[1] = closure
        let wrap_idx = self.pool.variant(wrap_with);

        // Compile and store the value into a local, evaluate closure on top of stack.
        self.compile_expr(&args[0], false);
        let val_slot = self.alloc_local("__hov");
        self.emit(Op::StoreLocal(val_slot));

        self.compile_expr(&args[1], false);
        let f_slot = self.alloc_local("__hof");
        self.emit(Op::StoreLocal(f_slot));

        // Stack discipline:
        //   load val ⇒ [v]
        //   dup     ⇒ [v, v]
        //   test    ⇒ [v, Bool]
        //   jumpifnot ⇒ [v]
        // Both branches end with [v] before the branch body.
        self.emit(Op::LoadLocal(val_slot));
        self.emit(Op::Dup);
        self.emit(Op::TestVariant(wrap_idx));
        let j_skip = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Matched arm: extract payload, call closure on it.
        // GetVariantArg consumes the [v] left by Dup and pushes its payload.
        self.emit(Op::GetVariantArg(0));
        let arg_slot = self.alloc_local("__hov_arg");
        self.emit(Op::StoreLocal(arg_slot));
        self.emit(Op::LoadLocal(f_slot));
        self.emit(Op::LoadLocal(arg_slot));
        let nid = self.pool.node_id("n_hov");
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
        if wrap_result {
            // map shape: re-wrap the closure result in the same variant tag.
            self.emit(Op::MakeVariant { name_idx: wrap_idx, arity: 1 });
        }
        let j_end = self.code.len();
        self.emit(Op::Jump(0));

        // Skip arm: stack already has [v] from the failed Dup; nothing to do.
        let skip_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
            *off = skip_target - (j_skip as i32 + 1);
        }

        // Patch the matched arm's jump past the skip arm (offsets are
        // relative to the instruction after the jump).
        let end_target = self.code.len() as i32;
        if let Op::Jump(off) = &mut self.code[j_end] {
            *off = end_target - (j_end as i32 + 1);
        }
    }
1832
1833    /// Sibling of `emit_variant_map` for the recovery combinators
1834    /// `result.or_else` and `option.or_else`. Differences from
1835    /// `emit_variant_map`:
1836    ///   - matches on the *negative* variant (`Err` / `None`)
1837    ///   - the closure's result becomes the call's result directly,
1838    ///     with no wrapping (it is itself a `Result` / `Option`)
1839    ///   - `option.or_else`'s closure takes zero args (`None` has no
1840    ///     payload to forward)
1841    fn emit_variant_or_else(
1842        &mut self,
1843        args: &[a::CExpr],
1844        match_on: &str,
1845        closure_arity: u16,
1846    ) {
1847        let match_idx = self.pool.variant(match_on);
1848
1849        self.compile_expr(&args[0], false);
1850        let val_slot = self.alloc_local("__hoe");
1851        self.emit(Op::StoreLocal(val_slot));
1852
1853        self.compile_expr(&args[1], false);
1854        let f_slot = self.alloc_local("__hoe_f");
1855        self.emit(Op::StoreLocal(f_slot));
1856
1857        // Stack discipline mirrors emit_variant_map:
1858        //   load val      ⇒ [v]
1859        //   dup           ⇒ [v, v]
1860        //   test          ⇒ [v, Bool]
1861        //   jumpifnot     ⇒ [v]
1862        // The unmatched arm leaves [v] (Ok/Some unchanged); the
1863        // matched arm pops [v] and pushes the closure's result.
1864        self.emit(Op::LoadLocal(val_slot));
1865        self.emit(Op::Dup);
1866        self.emit(Op::TestVariant(match_idx));
1867        let j_skip = self.code.len();
1868        self.emit(Op::JumpIfNot(0));
1869
1870        // Matched arm: pop the duplicate left on the stack,
1871        // then call the closure with whatever payload it expects.
1872        self.emit(Op::Pop);
1873        self.emit(Op::LoadLocal(f_slot));
1874        if closure_arity == 1 {
1875            self.emit(Op::LoadLocal(val_slot));
1876            self.emit(Op::GetVariantArg(0));
1877        }
1878        let nid = self.pool.node_id("n_hoe");
1879        self.emit(Op::CallClosure { arity: closure_arity, node_id_idx: nid });
1880
1881        let j_end = self.code.len();
1882        self.emit(Op::Jump(0));
1883
1884        // Unmatched arm: stack already holds [v]; nothing to do.
1885        let skip_target = self.code.len() as i32;
1886        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
1887            *off = skip_target - (j_skip as i32 + 1);
1888        }
1889
1890        let end_target = self.code.len() as i32;
1891        if let Op::Jump(off) = &mut self.code[j_end] {
1892            *off = end_target - (j_end as i32 + 1);
1893        }
1894    }
1895
1896    /// `option.unwrap_or_else(opt, f)` — lazy default via zero-arg thunk.
1897    ///   Some(x) → x          (unwrap; no wrapping)
1898    ///   None    → f()        (call thunk; return its result directly)
1899    fn emit_option_unwrap_or_else(&mut self, args: &[a::CExpr]) {
1900        let some_idx = self.pool.variant("Some");
1901
1902        // Compile opt and f; stash both so they're accessible on both arms.
1903        self.compile_expr(&args[0], false);
1904        let val_slot = self.alloc_local("__uoe_val");
1905        self.emit(Op::StoreLocal(val_slot));
1906
1907        self.compile_expr(&args[1], false);
1908        let f_slot = self.alloc_local("__uoe_f");
1909        self.emit(Op::StoreLocal(f_slot));
1910
1911        // Test whether opt is Some.
1912        //   load val ⇒ [v]
1913        //   dup      ⇒ [v, v]
1914        //   test     ⇒ [v, Bool]
1915        //   jumpifnot → None arm
1916        self.emit(Op::LoadLocal(val_slot));
1917        self.emit(Op::Dup);
1918        self.emit(Op::TestVariant(some_idx));
1919        let j_none = self.code.len();
1920        self.emit(Op::JumpIfNot(0));
1921
1922        // Some arm: extract the payload from [v] left on the stack.
1923        self.emit(Op::GetVariantArg(0));
1924        let j_end = self.code.len();
1925        self.emit(Op::Jump(0));
1926
1927        // None arm: pop the [v] duplicate, call the thunk.
1928        let none_target = self.code.len() as i32;
1929        if let Op::JumpIfNot(off) = &mut self.code[j_none] {
1930            *off = none_target - (j_none as i32 + 1);
1931        }
1932        self.emit(Op::Pop);
1933        self.emit(Op::LoadLocal(f_slot));
1934        let nid = self.pool.node_id("n_uoe");
1935        self.emit(Op::CallClosure { arity: 0, node_id_idx: nid });
1936
1937        // Patch jump-to-end from Some arm.
1938        let end_target = self.code.len() as i32;
1939        if let Op::Jump(off) = &mut self.code[j_end] {
1940            *off = end_target - (j_end as i32 + 1);
1941        }
1942    }
1943
1944    // ---- std.flow trampolines ----------------------------------------
1945    //
1946    // Each flow.<op>(c1, c2, ...) call site:
1947    //   1. compiles its closure args and leaves them on the stack
1948    //   2. registers a fresh "trampoline" Function whose body invokes
1949    //      those captured closures appropriately
1950    //   3. emits MakeClosure { fn_id: trampoline, capture_count: N }
1951    //
1952    // The trampoline's parameter layout is [capture_0, ..., capture_{N-1},
1953    // arg_0, ...]: captures first, the closure's own args after.
1954
1955    /// Allocate a fresh fn_id for a trampoline and install its bytecode.
1956    /// Trampolines are the one Function-creation path that already has
1957    /// the body in hand at install time (top-level fns and lambdas have
1958    /// it filled in later), so we compute `body_hash` immediately. The
1959    /// final hash pass at the end of `compile_program` is a no-op here.
1960    fn install_trampoline(&mut self, name: &str, arity: u16, locals_count: u16, code: Vec<Op>) -> u32 {
1961        let fn_id = self.next_fn_id.len() as u32;
1962        let body_hash = crate::program::compute_body_hash(
1963            arity, locals_count, &code, &self.pool.record_shapes);
1964        self.next_fn_id.push(Function {
1965            name: name.into(),
1966            arity,
1967            locals_count,
1968            code,
1969            effects: Vec::new(),
1970            body_hash,
1971            // Trampolines (flow.sequential / parallel / etc.) don't
1972            // surface refined params at this layer.
1973            refinements: Vec::new(),
1974        });
1975        fn_id
1976    }
1977
1978    /// `flow.sequential(f, g)` returns a closure `(x) -> g(f(x))`.
1979    fn emit_flow_sequential(&mut self, args: &[a::CExpr]) {
1980        // Push f, g; build the trampoline closure with 2 captures.
1981        self.compile_expr(&args[0], false);
1982        self.compile_expr(&args[1], false);
1983        let nid = self.pool.node_id("n_flow_sequential");
1984        let code = vec![
1985            // Locals: [f=0, g=1, x=2]
1986            Op::LoadLocal(0),                                  // push f
1987            Op::LoadLocal(2),                                  // push x
1988            Op::CallClosure { arity: 1, node_id_idx: nid },    // r = f(x)
1989            // stack: [r]
1990            Op::StoreLocal(3),                                 // tmp = r
1991            Op::LoadLocal(1),                                  // push g
1992            Op::LoadLocal(3),                                  // push tmp
1993            Op::CallClosure { arity: 1, node_id_idx: nid },    // r = g(tmp)
1994            Op::Return,
1995        ];
1996        let fn_id = self.install_trampoline("__flow_sequential", 3, 4, code);
1997        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
1998    }
1999
2000    /// `flow.parallel(fa, fb)` returns a closure `() -> (fa(), fb())`.
2001    /// Implementation is sequential: each function is called in order
2002    /// and the results are packed into a 2-tuple. The spec (§11.2)
2003    /// allows the runtime to apply true parallelism here; that needs
2004    /// a thread-safe handler split and is left to a follow-up. The
2005    /// signature is what users program against — sequential vs threaded
2006    /// is an implementation detail invisible to the type system.
2007    fn emit_flow_parallel(&mut self, args: &[a::CExpr]) {
2008        // Push fa, fb; build a 0-arg trampoline closure with 2 captures.
2009        self.compile_expr(&args[0], false);
2010        self.compile_expr(&args[1], false);
2011        let nid = self.pool.node_id("n_flow_parallel");
2012        let code = vec![
2013            // Locals: [fa=0, fb=1]
2014            Op::LoadLocal(0),                                  // push fa
2015            Op::CallClosure { arity: 0, node_id_idx: nid },    // a = fa()
2016            Op::LoadLocal(1),                                  // push fb
2017            Op::CallClosure { arity: 0, node_id_idx: nid },    // b = fb()
2018            Op::MakeTuple(2),                                  // (a, b)
2019            Op::Return,
2020        ];
2021        let fn_id = self.install_trampoline("__flow_parallel", 2, 2, code);
2022        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
2023    }
2024
2025    /// `flow.parallel_list(actions)` runs each 0-arg closure in `actions`
2026    /// and returns the results as a list in input order. Variadic
2027    /// counterpart to `flow.parallel`. Sequential under the hood — the
2028    /// spec (§11.2) reserves true threading for a future scheduler.
2029    /// Compiled inline (mirrors `list.map`) so closure args can flow
2030    /// through `CallClosure` without a heap-allocated trampoline.
2031    fn emit_flow_parallel_list(&mut self, args: &[a::CExpr]) {
2032        // xs := actions
2033        self.compile_expr(&args[0], false);
2034        let xs = self.alloc_local("__fpl_xs");
2035        self.emit(Op::StoreLocal(xs));
2036
2037        // out := []
2038        self.emit(Op::MakeList(0));
2039        let out = self.alloc_local("__fpl_out");
2040        self.emit(Op::StoreLocal(out));
2041
2042        // i := 0
2043        let zero = self.pool.int(0);
2044        self.emit(Op::PushConst(zero));
2045        let i = self.alloc_local("__fpl_i");
2046        self.emit(Op::StoreLocal(i));
2047
2048        // loop_top: while i < len(xs) { ... }
2049        let loop_top = self.code.len();
2050        self.emit(Op::LoadLocal(i));
2051        self.emit(Op::LoadLocal(xs));
2052        self.emit(Op::GetListLen);
2053        self.emit(Op::NumLt);
2054        let j_exit = self.code.len();
2055        self.emit(Op::JumpIfNot(0));
2056
2057        // body: out := out ++ [xs[i]()]
2058        let nid = self.pool.node_id("n_flow_parallel_list");
2059        self.emit(Op::LoadLocal(out));
2060        self.emit(Op::LoadLocal(xs));
2061        self.emit(Op::LoadLocal(i));
2062        self.emit(Op::GetListElemDyn);
2063        self.emit(Op::CallClosure { arity: 0, node_id_idx: nid });
2064        self.emit(Op::ListAppend);
2065        self.emit(Op::StoreLocal(out));
2066
2067        // i := i + 1
2068        self.emit(Op::LoadLocal(i));
2069        let one = self.pool.int(1);
2070        self.emit(Op::PushConst(one));
2071        self.emit(Op::NumAdd);
2072        self.emit(Op::StoreLocal(i));
2073
2074        // jump back
2075        let jump_back = self.code.len();
2076        let back = (loop_top as i32) - (jump_back as i32 + 1);
2077        self.emit(Op::Jump(back));
2078
2079        // exit: patch j_exit, push out
2080        let exit_target = self.code.len() as i32;
2081        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
2082            *off = exit_target - (j_exit as i32 + 1);
2083        }
2084        self.emit(Op::LoadLocal(out));
2085    }
2086
2087    /// `flow.branch(cond, t, f)` returns a closure `(x) -> if cond(x) then t(x) else f(x)`.
2088    fn emit_flow_branch(&mut self, args: &[a::CExpr]) {
2089        self.compile_expr(&args[0], false);
2090        self.compile_expr(&args[1], false);
2091        self.compile_expr(&args[2], false);
2092        let nid = self.pool.node_id("n_flow_branch");
2093        let mut code = vec![
2094            // Locals: [cond=0, t=1, f=2, x=3]
2095            Op::LoadLocal(0),                               // push cond
2096            Op::LoadLocal(3),                               // push x
2097            Op::CallClosure { arity: 1, node_id_idx: nid }, // bool
2098        ];
2099        let j_false = code.len();
2100        code.push(Op::JumpIfNot(0));                        // patched
2101        // true arm: t(x)
2102        code.push(Op::LoadLocal(1));
2103        code.push(Op::LoadLocal(3));
2104        code.push(Op::CallClosure { arity: 1, node_id_idx: nid });
2105        code.push(Op::Return);
2106        // false arm
2107        let false_target = code.len() as i32;
2108        if let Op::JumpIfNot(off) = &mut code[j_false] {
2109            *off = false_target - (j_false as i32 + 1);
2110        }
2111        code.push(Op::LoadLocal(2));
2112        code.push(Op::LoadLocal(3));
2113        code.push(Op::CallClosure { arity: 1, node_id_idx: nid });
2114        code.push(Op::Return);
2115
2116        let fn_id = self.install_trampoline("__flow_branch", 4, 4, code);
2117        self.emit(Op::MakeClosure { fn_id, capture_count: 3 });
2118    }
2119
    /// `flow.retry(f, max_attempts)` returns a closure `(x) -> Result[U, E]`
    /// that calls `f(x)` up to `max_attempts` times, returning the first
    /// `Ok` or the final `Err`.
    ///
    /// The loop lives inside a trampoline `Function` assembled in a local
    /// `Vec<Op>`, so jump offsets are computed against that vector rather
    /// than `self.code`.
    fn emit_flow_retry(&mut self, args: &[a::CExpr]) {
        // Captures f and max_attempts stay on the stack for MakeClosure.
        self.compile_expr(&args[0], false);
        self.compile_expr(&args[1], false);
        let call_nid = self.pool.node_id("n_flow_retry");
        let ok_idx = self.pool.variant("Ok");
        let zero_const = self.pool.int(0);
        let one_const = self.pool.int(1);
        // Locals: [f=0, max=1, x=2, i=3, last=4]
        let mut code = vec![
            // i := 0
            Op::PushConst(zero_const),
            Op::StoreLocal(3),
        ];
        // loop_top: while i < max
        let loop_top = code.len() as i32;
        code.push(Op::LoadLocal(3));
        code.push(Op::LoadLocal(1));
        code.push(Op::NumLt);
        let j_done = code.len();
        code.push(Op::JumpIfNot(0));                       // patched

        // body: r := f(x); last := r
        code.push(Op::LoadLocal(0));
        code.push(Op::LoadLocal(2));
        code.push(Op::CallClosure { arity: 1, node_id_idx: call_nid });
        code.push(Op::StoreLocal(4));

        // Test variant Ok on last; if so, return last.
        code.push(Op::LoadLocal(4));
        code.push(Op::TestVariant(ok_idx));
        let j_was_err = code.len();
        code.push(Op::JumpIfNot(0));                       // patched: skip return
        code.push(Op::LoadLocal(4));
        code.push(Op::Return);

        // was_err: i := i + 1; jump loop_top
        let was_err_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_was_err] {
            *off = was_err_target - (j_was_err as i32 + 1);
        }
        code.push(Op::LoadLocal(3));
        code.push(Op::PushConst(one_const));
        code.push(Op::NumAdd);
        code.push(Op::StoreLocal(3));
        // Backward jump: the offset is relative to the pc after the Jump.
        let pc_after_jump = code.len() as i32 + 1;
        code.push(Op::Jump(loop_top - pc_after_jump));

        // done: return last (the final Err, or Unit if max=0).
        // NOTE(review): with max=0, local 4 is never written before this
        // load — relies on locals defaulting to Unit; confirm in the VM.
        let done_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_done] {
            *off = done_target - (j_done as i32 + 1);
        }
        code.push(Op::LoadLocal(4));
        code.push(Op::Return);

        // arity 3 = two captures + one call-time arg; 5 locals incl. i, last.
        let fn_id = self.install_trampoline("__flow_retry", 3, 5, code);
        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
    }
2181
2182    /// `flow.retry_with_backoff(f, attempts, base_ms)` (#226). Variant
2183    /// of `flow.retry` that sleeps between attempts. The first
2184    /// attempt fires immediately; attempt k > 1 waits `base_ms *
2185    /// 2^(k-2)` ms before retrying. Sleeps go through
2186    /// `time.sleep_ms`, which is why the resulting closure carries
2187    /// `[time]` in its effect row even though the underlying `f` is
2188    /// pure.
2189    fn emit_flow_retry_with_backoff(&mut self, args: &[a::CExpr]) {
2190        // Push captures: f, max, base_ms. The trampoline takes one
2191        // call-time arg `x`, so capture_count = 3, arity = 4.
2192        self.compile_expr(&args[0], false);
2193        self.compile_expr(&args[1], false);
2194        self.compile_expr(&args[2], false);
2195        let call_nid    = self.pool.node_id("n_flow_retry_backoff");
2196        let sleep_nid   = self.pool.node_id("n_flow_retry_backoff_sleep");
2197        let kind_idx    = self.pool.str("time");
2198        let op_idx      = self.pool.str("sleep_ms");
2199        let ok_idx      = self.pool.variant("Ok");
2200        let zero_const  = self.pool.int(0);
2201        let one_const   = self.pool.int(1);
2202        let two_const   = self.pool.int(2);
2203        // Locals layout:
2204        //   0=f, 1=max, 2=base_ms (captures)
2205        //   3=x (arg)
2206        //   4=i, 5=last, 6=next_delay (working state)
2207        let mut code = vec![
2208            // next_delay := base_ms
2209            Op::LoadLocal(2),
2210            Op::StoreLocal(6),
2211            // i := 0
2212            Op::PushConst(zero_const),
2213            Op::StoreLocal(4),
2214        ];
2215
2216        let loop_top = code.len() as i32;
2217        // while i < max
2218        code.push(Op::LoadLocal(4));
2219        code.push(Op::LoadLocal(1));
2220        code.push(Op::NumLt);
2221        let j_done = code.len();
2222        code.push(Op::JumpIfNot(0)); // patched
2223
2224        // if i > 0: time.sleep_ms(next_delay); next_delay := next_delay * 2
2225        code.push(Op::PushConst(zero_const));
2226        code.push(Op::LoadLocal(4));
2227        code.push(Op::NumLt);                // 0 < i ?
2228        let j_no_sleep = code.len();
2229        code.push(Op::JumpIfNot(0));         // patched: skip the sleep block
2230        // Sleep
2231        code.push(Op::LoadLocal(6));         // arg = next_delay
2232        code.push(Op::EffectCall {
2233            kind_idx, op_idx, arity: 1, node_id_idx: sleep_nid,
2234        });
2235        code.push(Op::Pop);                  // discard the Unit result
2236        // next_delay := next_delay * 2
2237        code.push(Op::LoadLocal(6));
2238        code.push(Op::PushConst(two_const));
2239        code.push(Op::NumMul);
2240        code.push(Op::StoreLocal(6));
2241        // patch the no-sleep skip
2242        let after_sleep = code.len() as i32;
2243        if let Op::JumpIfNot(off) = &mut code[j_no_sleep] {
2244            *off = after_sleep - (j_no_sleep as i32 + 1);
2245        }
2246
2247        // last := f(x)
2248        code.push(Op::LoadLocal(0));
2249        code.push(Op::LoadLocal(3));
2250        code.push(Op::CallClosure { arity: 1, node_id_idx: call_nid });
2251        code.push(Op::StoreLocal(5));
2252
2253        // if Ok(last): return last
2254        code.push(Op::LoadLocal(5));
2255        code.push(Op::TestVariant(ok_idx));
2256        let j_was_err = code.len();
2257        code.push(Op::JumpIfNot(0)); // patched
2258        code.push(Op::LoadLocal(5));
2259        code.push(Op::Return);
2260
2261        // was_err: i := i + 1; jump loop_top
2262        let was_err_target = code.len() as i32;
2263        if let Op::JumpIfNot(off) = &mut code[j_was_err] {
2264            *off = was_err_target - (j_was_err as i32 + 1);
2265        }
2266        code.push(Op::LoadLocal(4));
2267        code.push(Op::PushConst(one_const));
2268        code.push(Op::NumAdd);
2269        code.push(Op::StoreLocal(4));
2270        let pc_after_jump = code.len() as i32 + 1;
2271        code.push(Op::Jump(loop_top - pc_after_jump));
2272
2273        // done: return last (the final Err, or Unit if max=0).
2274        let done_target = code.len() as i32;
2275        if let Op::JumpIfNot(off) = &mut code[j_done] {
2276            *off = done_target - (j_done as i32 + 1);
2277        }
2278        code.push(Op::LoadLocal(5));
2279        code.push(Op::Return);
2280
2281        let fn_id = self.install_trampoline("__flow_retry_backoff", 4, 7, code);
2282        self.emit(Op::MakeClosure { fn_id, capture_count: 3 });
2283    }
2284}
2285
2286/// Collect free variables referenced in `e` that are not in `bound`.
2287/// Mutates `bound` to track let/lambda introductions during the walk;
2288/// the caller's set is preserved on return because Rust's borrow rules
2289/// force us to clone for sub-scopes that rebind a name.
2290fn free_vars(e: &a::CExpr, bound: &mut std::collections::HashSet<String>, out: &mut Vec<String>) {
2291    match e {
2292        a::CExpr::Literal { .. } => {}
2293        a::CExpr::Var { name } => {
2294            if !bound.contains(name) && !out.contains(name) {
2295                out.push(name.clone());
2296            }
2297        }
2298        a::CExpr::Call { callee, args } => {
2299            free_vars(callee, bound, out);
2300            for a in args { free_vars(a, bound, out); }
2301        }
2302        a::CExpr::Let { name, value, body, .. } => {
2303            free_vars(value, bound, out);
2304            let was_bound = bound.contains(name);
2305            bound.insert(name.clone());
2306            free_vars(body, bound, out);
2307            if !was_bound { bound.remove(name); }
2308        }
2309        a::CExpr::Match { scrutinee, arms } => {
2310            free_vars(scrutinee, bound, out);
2311            for arm in arms {
2312                let mut local_bound = bound.clone();
2313                pattern_binders(&arm.pattern, &mut local_bound);
2314                free_vars(&arm.body, &mut local_bound, out);
2315            }
2316        }
2317        a::CExpr::Block { statements, result } => {
2318            let mut local_bound = bound.clone();
2319            for s in statements { free_vars(s, &mut local_bound, out); }
2320            free_vars(result, &mut local_bound, out);
2321        }
2322        a::CExpr::Constructor { args, .. } => {
2323            for a in args { free_vars(a, bound, out); }
2324        }
2325        a::CExpr::RecordLit { fields } => {
2326            for f in fields { free_vars(&f.value, bound, out); }
2327        }
2328        a::CExpr::TupleLit { items } | a::CExpr::ListLit { items } => {
2329            for it in items { free_vars(it, bound, out); }
2330        }
2331        a::CExpr::FieldAccess { value, .. } => free_vars(value, bound, out),
2332        a::CExpr::Lambda { params, body, .. } => {
2333            let mut inner = bound.clone();
2334            for p in params { inner.insert(p.name.clone()); }
2335            free_vars(body, &mut inner, out);
2336        }
2337        a::CExpr::BinOp { lhs, rhs, .. } => {
2338            free_vars(lhs, bound, out);
2339            free_vars(rhs, bound, out);
2340        }
2341        a::CExpr::UnaryOp { expr, .. } => free_vars(expr, bound, out),
2342        a::CExpr::Return { value } => free_vars(value, bound, out),
2343    }
2344}
2345
2346fn pattern_binders(p: &a::Pattern, bound: &mut std::collections::HashSet<String>) {
2347    match p {
2348        a::Pattern::PWild | a::Pattern::PLiteral { .. } => {}
2349        a::Pattern::PVar { name } => { bound.insert(name.clone()); }
2350        a::Pattern::PConstructor { args, .. } => {
2351            for a in args { pattern_binders(a, bound); }
2352        }
2353        a::Pattern::PRecord { fields } => {
2354            for f in fields { pattern_binders(&f.pattern, bound); }
2355        }
2356        a::Pattern::PTuple { items } => {
2357            for it in items { pattern_binders(it, bound); }
2358        }
2359    }
2360}