Skip to main content

lex_bytecode/
compiler.rs

1//! M4 compiler: canonical AST → bytecode.
2
3use crate::op::*;
4use crate::program::*;
5use indexmap::IndexMap;
6use lex_ast as a;
7
/// Deduplicating builder for the program constant pool. `pool` is the
/// eventual `Program::constants` vec; each side map interns one kind of
/// constant so repeated literals share a single pool index. Floats are
/// deliberately not interned (see `float`: NaN breaks key equality).
#[derive(Default)]
struct ConstPool {
    /// The pool itself, in insertion order; indices are `u32`.
    pool: Vec<Const>,
    /// Interning maps: value → existing pool index, one per constant kind.
    fields: IndexMap<String, u32>,
    variants: IndexMap<String, u32>,
    node_ids: IndexMap<String, u32>,
    ints: IndexMap<i64, u32>,
    /// Bools keyed as `u8` (0/1) so the value can serve as a map key.
    bools: IndexMap<u8, u32>,
    strs: IndexMap<String, u32>,
}
18
19impl ConstPool {
20    fn field(&mut self, name: &str) -> u32 {
21        if let Some(i) = self.fields.get(name) { return *i; }
22        let i = self.pool.len() as u32;
23        self.pool.push(Const::FieldName(name.into()));
24        self.fields.insert(name.into(), i);
25        i
26    }
27    fn variant(&mut self, name: &str) -> u32 {
28        if let Some(i) = self.variants.get(name) { return *i; }
29        let i = self.pool.len() as u32;
30        self.pool.push(Const::VariantName(name.into()));
31        self.variants.insert(name.into(), i);
32        i
33    }
34    fn node_id(&mut self, name: &str) -> u32 {
35        if let Some(i) = self.node_ids.get(name) { return *i; }
36        let i = self.pool.len() as u32;
37        self.pool.push(Const::NodeId(name.into()));
38        self.node_ids.insert(name.into(), i);
39        i
40    }
41    fn int(&mut self, n: i64) -> u32 {
42        if let Some(i) = self.ints.get(&n) { return *i; }
43        let i = self.pool.len() as u32;
44        self.pool.push(Const::Int(n));
45        self.ints.insert(n, i);
46        i
47    }
48    fn bool(&mut self, b: bool) -> u32 {
49        let key = b as u8;
50        if let Some(i) = self.bools.get(&key) { return *i; }
51        let i = self.pool.len() as u32;
52        self.pool.push(Const::Bool(b));
53        self.bools.insert(key, i);
54        i
55    }
56    fn str(&mut self, s: &str) -> u32 {
57        if let Some(i) = self.strs.get(s) { return *i; }
58        let i = self.pool.len() as u32;
59        self.pool.push(Const::Str(s.into()));
60        self.strs.insert(s.into(), i);
61        i
62    }
63    fn float(&mut self, f: f64) -> u32 {
64        // Floats: not deduped (NaN issues).
65        let i = self.pool.len() as u32;
66        self.pool.push(Const::Float(f));
67        i
68    }
69    fn unit(&mut self) -> u32 {
70        let i = self.pool.len() as u32;
71        self.pool.push(Const::Unit);
72        i
73    }
74}
75
/// Compile a whole program.
///
/// Passes, in order:
/// 1. Resolve `import` stages to alias → module-name.
/// 2. Register every top-level `fn` with a placeholder body, so
///    name → fn_id is total before any body compiles (needed for
///    forward references and recursion).
/// 3. Compile each fn body into its reserved slot.
/// 4. Drain the lambda queue (lambdas may enqueue further lambdas).
/// 5. Stamp each function with its content hash (#222).
pub fn compile_program(stages: &[a::Stage]) -> Program {
    let mut p = Program {
        constants: Vec::new(),
        functions: Vec::new(),
        function_names: IndexMap::new(),
        module_aliases: IndexMap::new(),
        entry: None,
    };

    // Collect imports as alias → module-name. The module name is the part
    // after `std.` (so `import "std.io" as io` ⇒ alias `io` → module `io`).
    for s in stages {
        if let a::Stage::Import(i) = s {
            let module = i.reference.strip_prefix("std.").unwrap_or(&i.reference).to_string();
            p.module_aliases.insert(i.alias.clone(), module);
        }
    }

    // Registration pass: fn_id = position in `functions`; bodies are
    // filled in by the compile pass below.
    for s in stages {
        if let a::Stage::FnDecl(fd) = s {
            let idx = p.functions.len() as u32;
            p.function_names.insert(fd.name.clone(), idx);
            p.functions.push(Function {
                name: fd.name.clone(),
                arity: fd.params.len() as u16,
                locals_count: 0,
                code: Vec::new(),
                effects: fd.effects.iter().map(|e| DeclaredEffect {
                    kind: e.name.clone(),
                    arg: e.arg.as_ref().map(|a| match a {
                        a::EffectArg::Str { value } => EffectArg::Str(value.clone()),
                        a::EffectArg::Int { value } => EffectArg::Int(*value),
                        a::EffectArg::Ident { value } => EffectArg::Ident(value.clone()),
                    }),
                }).collect(),
                // Filled in at the end of the compile pass, once `code`
                // and `locals_count` are final. See #222.
                body_hash: crate::program::ZERO_BODY_HASH,
                // Per-param refinement predicates for runtime check
                // (#209 slice 3). Lifted directly from each param's
                // `TypeExpr::Refined` if present; `None` otherwise.
                refinements: fd.params.iter().map(|p| match &p.ty {
                    a::TypeExpr::Refined { binding, predicate, .. } =>
                        Some(crate::program::Refinement {
                            binding: binding.clone(),
                            predicate: (**predicate).clone(),
                        }),
                    _ => None,
                }).collect(),
            });
        }
    }

    let mut pool = ConstPool::default();
    // Cloned so FnCompiler can borrow them while `p.functions` is
    // mutably borrowed for lambda fn_id allocation.
    let function_names = p.function_names.clone();
    let module_aliases = p.module_aliases.clone();
    let mut pending_lambdas: Vec<PendingLambda> = Vec::new();

    for s in stages {
        if let a::Stage::FnDecl(_) = s {
            // Build a NodeId map for *this* stage so the compiler can stamp
            // each Call/EffectCall opcode with the originating AST node.
            let id_map = lex_ast::expr_ids(s);
            let fd = match s { a::Stage::FnDecl(fd) => fd, _ => unreachable!() };
            let mut fc = FnCompiler {
                code: Vec::new(),
                locals: IndexMap::new(),
                next_local: 0,
                peak_local: 0,
                pool: &mut pool,
                function_names: &function_names,
                module_aliases: &module_aliases,
                id_map: &id_map,
                pending_lambdas: &mut pending_lambdas,
                next_fn_id: &mut p.functions,
            };
            // Params occupy the first local slots, in declaration order.
            for param in &fd.params {
                let i = fc.next_local;
                fc.locals.insert(param.name.clone(), i);
                fc.next_local += 1;
                fc.peak_local = fc.next_local;
            }
            fc.compile_expr(&fd.body, true);
            fc.code.push(Op::Return);
            let code = std::mem::take(&mut fc.code);
            let peak = fc.peak_local;
            drop(fc);
            let idx = function_names[&fd.name];
            p.functions[idx as usize].code = code;
            p.functions[idx as usize].locals_count = peak;
        }
    }

    // Drain the lambda queue. Each lambda may emit further lambdas;
    // loop until the queue is empty. Note `Vec::pop` makes this LIFO
    // (not FIFO as previously claimed) — order is irrelevant, since
    // every lambda already has its fn_id and writes into its own
    // function-table slot.
    while let Some(pl) = pending_lambdas.pop() {
        // No per-stage id map for lambda bodies: Call/EffectCall ops in
        // lambdas fall back to the "n_?" node id (see compile_call).
        let id_map = std::collections::HashMap::new();
        let mut fc = FnCompiler {
            code: Vec::new(),
            locals: IndexMap::new(),
            next_local: 0,
            peak_local: 0,
            pool: &mut pool,
            function_names: &function_names,
            module_aliases: &module_aliases,
            id_map: &id_map,
            pending_lambdas: &mut pending_lambdas,
            next_fn_id: &mut p.functions,
        };
        // Frame layout: captures first, then the lambda's own params —
        // this matches the arity recorded in compile_lambda.
        for name in &pl.capture_names {
            let i = fc.next_local;
            fc.locals.insert(name.clone(), i);
            fc.next_local += 1;
            fc.peak_local = fc.next_local;
        }
        for p in &pl.params {
            let i = fc.next_local;
            fc.locals.insert(p.name.clone(), i);
            fc.next_local += 1;
            fc.peak_local = fc.next_local;
        }
        fc.compile_expr(&pl.body, true);
        fc.code.push(Op::Return);
        let code = std::mem::take(&mut fc.code);
        let peak = fc.peak_local;
        drop(fc);
        p.functions[pl.fn_id as usize].code = code;
        p.functions[pl.fn_id as usize].locals_count = peak;
    }

    // Final pass: stamp every function with its content hash now that
    // every body is finalized (#222). Trampolines installed via
    // `install_trampoline` already have it; recomputing is cheap and
    // makes the invariant easier to read at this top level.
    for f in p.functions.iter_mut() {
        if f.body_hash == crate::program::ZERO_BODY_HASH {
            f.body_hash = crate::program::compute_body_hash(
                f.arity, f.locals_count, &f.code);
        }
    }

    p.constants = pool.pool;
    p
}
220
/// A lambda whose body compilation is deferred until after the
/// top-level fns: its `MakeClosure` site has already been emitted, only
/// the function body at `fn_id` remains to be filled in.
#[derive(Debug, Clone)]
struct PendingLambda {
    /// Function-table slot reserved for this lambda by `compile_lambda`.
    fn_id: u32,
    /// Names of captured outer-scope locals, in order.
    capture_names: Vec<String>,
    /// The lambda's own parameters; in the frame they follow the captures.
    params: Vec<a::Param>,
    /// Lambda body, cloned out of the enclosing AST.
    body: a::CExpr,
}
229
/// Per-function compilation state: one instance per top-level fn (or
/// queued lambda), borrowing the program-wide tables.
struct FnCompiler<'a> {
    /// Bytecode emitted so far for the current function body.
    code: Vec<Op>,
    /// Name → local slot for everything currently in scope.
    locals: IndexMap<String, u16>,
    /// Next free local slot.
    next_local: u16,
    /// Peak local usage seen during compilation (for VM frame sizing).
    peak_local: u16,
    /// Shared, deduplicating constant pool for the whole program.
    pool: &'a mut ConstPool,
    /// Top-level fn name → fn_id, fixed by the registration pass.
    function_names: &'a IndexMap<String, u32>,
    /// Import alias → std module name (drives EffectCall emission).
    module_aliases: &'a IndexMap<String, String>,
    /// CExpr address → NodeId, populated per stage via `lex_ast::expr_ids`.
    id_map: &'a std::collections::HashMap<*const a::CExpr, lex_ast::NodeId>,
    /// Queue of lambdas discovered during compilation; each gets a fresh
    /// fn_id and is compiled in a later pass.
    pending_lambdas: &'a mut Vec<PendingLambda>,
    /// Mutable view of the function table — used to allocate fn_ids for
    /// freshly-discovered lambdas.
    next_fn_id: &'a mut Vec<Function>,
}
248
249impl<'a> FnCompiler<'a> {
250    fn alloc_local(&mut self, name: &str) -> u16 {
251        let i = self.next_local;
252        self.locals.insert(name.into(), i);
253        self.next_local += 1;
254        if self.next_local > self.peak_local { self.peak_local = self.next_local; }
255        i
256    }
257    fn emit(&mut self, op: Op) { self.code.push(op); }
258
    /// Lower one canonical expression to bytecode. The emitted code
    /// leaves exactly one value on the stack. `tail` marks tail
    /// position: it enables `TailCall` in `compile_call` and is threaded
    /// through the tail sub-expressions of Let/Block/Match.
    fn compile_expr(&mut self, e: &a::CExpr, tail: bool) {
        match e {
            a::CExpr::Literal { value } => self.compile_lit(value),
            a::CExpr::Var { name } => {
                // Locals shadow top-level fns: check `locals` first.
                if let Some(slot) = self.locals.get(name) {
                    self.emit(Op::LoadLocal(*slot));
                } else if let Some(&fn_id) = self.function_names.get(name) {
                    // Function name used as a *value* (e.g. as a record-field
                    // initializer or fold-callback arg) — materialize it as a
                    // closure with no captures. The runtime already accepts
                    // `Value::Closure { fn_id, captures: vec![] }` and
                    // `CallClosure` dispatches it. (#169)
                    self.emit(Op::MakeClosure { fn_id, capture_count: 0 });
                } else {
                    // Should be caught at type-check time; the type checker
                    // walks every Var. If we land here it's a compiler bug,
                    // not a user typo.
                    panic!("unknown var in compiler: {name}");
                }
            }
            a::CExpr::Let { name, ty: _, value, body } => {
                // Value is never in tail position; the body inherits `tail`.
                self.compile_expr(value, false);
                let slot = self.alloc_local(name);
                self.emit(Op::StoreLocal(slot));
                self.compile_expr(body, tail);
            }
            a::CExpr::Block { statements, result } => {
                // Intermediate statement values are popped; only the
                // result expression's value survives.
                for s in statements {
                    self.compile_expr(s, false);
                    self.emit(Op::Pop);
                }
                self.compile_expr(result, tail);
            }
            a::CExpr::Call { callee, args } => self.compile_call(e, callee, args, tail),
            a::CExpr::Constructor { name, args } => {
                // Args pushed left-to-right, then packed into a variant.
                for a in args { self.compile_expr(a, false); }
                let name_idx = self.pool.variant(name);
                self.emit(Op::MakeVariant { name_idx, arity: args.len() as u16 });
            }
            a::CExpr::Match { scrutinee, arms } => self.compile_match(scrutinee, arms, tail),
            a::CExpr::RecordLit { fields } => {
                // Field values pushed in declaration order; MakeRecord
                // pairs them with the interned field-name indices.
                let mut idxs = Vec::with_capacity(fields.len());
                for f in fields {
                    self.compile_expr(&f.value, false);
                    idxs.push(self.pool.field(&f.name));
                }
                self.emit(Op::MakeRecord { field_name_indices: idxs });
            }
            a::CExpr::TupleLit { items } => {
                for it in items { self.compile_expr(it, false); }
                self.emit(Op::MakeTuple(items.len() as u16));
            }
            a::CExpr::ListLit { items } => {
                for it in items { self.compile_expr(it, false); }
                self.emit(Op::MakeList(items.len() as u32));
            }
            a::CExpr::FieldAccess { value, field } => {
                self.compile_expr(value, false);
                let idx = self.pool.field(field);
                self.emit(Op::GetField(idx));
            }
            a::CExpr::BinOp { op, lhs, rhs } => self.compile_binop(op, lhs, rhs),
            a::CExpr::UnaryOp { op, expr } => {
                self.compile_expr(expr, false);
                match op.as_str() {
                    "-" => self.emit(Op::NumNeg),
                    "not" => self.emit(Op::BoolNot),
                    other => panic!("unknown unary: {other}"),
                }
            }
            a::CExpr::Lambda { params, body, .. } => self.compile_lambda(params, body),
            a::CExpr::Return { value } => {
                // Explicit return: the value is in tail position by
                // definition, followed by an unconditional Return.
                self.compile_expr(value, true);
                self.emit(Op::Return);
            }
        }
    }
336
337    fn compile_lit(&mut self, l: &a::CLit) {
338        let i = match l {
339            a::CLit::Int { value } => self.pool.int(*value),
340            a::CLit::Bool { value } => self.pool.bool(*value),
341            a::CLit::Float { value } => {
342                let f: f64 = value.parse().unwrap_or(0.0);
343                self.pool.float(f)
344            }
345            a::CLit::Str { value } => self.pool.str(value),
346            a::CLit::Bytes { value: _ } => {
347                // Stub: M4 doesn't use bytes literals in §3.13 examples.
348                let i = self.pool.pool.len() as u32;
349                self.pool.pool.push(Const::Bytes(Vec::new()));
350                i
351            }
352            a::CLit::Unit => self.pool.unit(),
353        };
354        self.emit(Op::PushConst(i));
355    }
356
    /// Compile a call expression. Dispatch order:
    /// 1. `alias.op(args)` where `alias` is an imported module — either
    ///    an inline higher-order expansion or a generic `EffectCall`.
    /// 2. Direct call to a known top-level fn (tail-aware).
    /// 3. A local holding a first-class function value → `CallClosure`.
    /// 4. Any other callee expression (e.g. a lambda applied directly).
    ///
    /// `call_expr` is the whole Call node — its address keys `id_map`
    /// for the node-id stamp; missing entries fall back to `"n_?"`
    /// (always the case for lambda bodies, which have no id map).
    fn compile_call(&mut self, call_expr: &a::CExpr, callee: &a::CExpr, args: &[a::CExpr], tail: bool) {
        let node_id = self
            .id_map
            .get(&(call_expr as *const a::CExpr))
            .map(|n| n.as_str().to_string())
            .unwrap_or_else(|| "n_?".into());
        let node_id_idx = self.pool.node_id(&node_id);

        // Module function call: `alias.op(args)` where `alias` is an imported
        // module ⇒ EffectCall, except for higher-order pure ops where we
        // emit inline bytecode using CallClosure (the closure-arg can't be
        // serialized through the effect handler).
        if let a::CExpr::FieldAccess { value, field } = callee {
            if let a::CExpr::Var { name } = value.as_ref() {
                if let Some(module) = self.module_aliases.get(name) {
                    if self.try_emit_higher_order(module, field, args, node_id_idx) {
                        // `tail` deliberately discarded: inline expansions
                        // and effect calls are never tail-called.
                        let _ = tail;
                        return;
                    }
                    for a in args { self.compile_expr(a, false); }
                    let kind_idx = self.pool.str(module);
                    let op_idx = self.pool.str(field);
                    self.emit(Op::EffectCall {
                        kind_idx,
                        op_idx,
                        arity: args.len() as u16,
                        node_id_idx,
                    });
                    let _ = tail;
                    return;
                }
            }
        }
        match callee {
            // NOTE: fn-name check precedes the locals check, so a local
            // shadowing a top-level fn would NOT win here (the reverse of
            // compile_expr's Var resolution) — presumably the type checker
            // rules that case out; confirm upstream.
            a::CExpr::Var { name } if self.function_names.contains_key(name) => {
                for a in args { self.compile_expr(a, false); }
                let fn_id = self.function_names[name];
                if tail {
                    self.emit(Op::TailCall { fn_id, arity: args.len() as u16, node_id_idx });
                } else {
                    self.emit(Op::Call { fn_id, arity: args.len() as u16, node_id_idx });
                }
            }
            a::CExpr::Var { name } if self.locals.contains_key(name) => {
                // First-class function value bound to a local. Push the
                // closure, then args, then CallClosure.
                let slot = self.locals[name];
                self.emit(Op::LoadLocal(slot));
                for a in args { self.compile_expr(a, false); }
                self.emit(Op::CallClosure { arity: args.len() as u16, node_id_idx });
            }
            // Lambda directly applied — push closure + args + CallClosure.
            other => {
                self.compile_expr(other, false);
                for a in args { self.compile_expr(a, false); }
                self.emit(Op::CallClosure { arity: args.len() as u16, node_id_idx });
            }
        }
    }
416
417    fn compile_binop(&mut self, op: &str, lhs: &a::CExpr, rhs: &a::CExpr) {
418        self.compile_expr(lhs, false);
419        self.compile_expr(rhs, false);
420        match op {
421            "+" => self.emit(Op::NumAdd),
422            "-" => self.emit(Op::NumSub),
423            "*" => self.emit(Op::NumMul),
424            "/" => self.emit(Op::NumDiv),
425            "%" => self.emit(Op::NumMod),
426            "==" => self.emit(Op::NumEq),
427            "!=" => { self.emit(Op::NumEq); self.emit(Op::BoolNot); }
428            "<" => self.emit(Op::NumLt),
429            "<=" => self.emit(Op::NumLe),
430            ">" => { self.emit_swap_top2(); self.emit(Op::NumLt); }
431            ">=" => { self.emit_swap_top2(); self.emit(Op::NumLe); }
432            "and" => self.emit(Op::BoolAnd),
433            "or" => self.emit(Op::BoolOr),
434            other => panic!("unknown binop: {other:?}"),
435        }
436    }
437
438    fn emit_swap_top2(&mut self) {
439        let a = self.alloc_local("__swap_a");
440        let b = self.alloc_local("__swap_b");
441        self.emit(Op::StoreLocal(b));
442        self.emit(Op::StoreLocal(a));
443        self.emit(Op::LoadLocal(b));
444        self.emit(Op::LoadLocal(a));
445    }
446
    /// Compile a match expression.
    ///
    /// Layout: the scrutinee is evaluated once into a dedicated local;
    /// each arm reloads it, runs its pattern test (which registers
    /// fail-jumps), runs its body, and jumps to the common end. Each
    /// arm's fail-jumps are patched to land at the start of the next
    /// arm; after the last arm a "non-exhaustive match" panic is
    /// emitted. Arm-local bindings are rolled back between arms by
    /// restoring `locals`/`next_local` snapshots.
    fn compile_match(&mut self, scrutinee: &a::CExpr, arms: &[a::Arm], tail: bool) {
        self.compile_expr(scrutinee, false);
        let scrut_slot = self.alloc_local("__scrut");
        self.emit(Op::StoreLocal(scrut_slot));

        let mut end_jumps: Vec<usize> = Vec::new();
        for arm in arms {
            // Snapshot the local scope so per-arm bindings don't leak
            // into later arms.
            let arm_start_locals = self.next_local;
            let arm_start_locals_map = self.locals.clone();

            self.emit(Op::LoadLocal(scrut_slot));
            // NOTE: `bindings` is collected by the pattern test but not
            // read here — binding takes effect via `alloc_local`'s
            // insertion into `locals`.
            let mut bindings: Vec<(String, u16)> = Vec::new();
            let fail_jumps: Vec<usize> = self.compile_pattern_test(&arm.pattern, &mut bindings);

            self.compile_expr(&arm.body, tail);
            let j_end = self.code.len();
            self.emit(Op::Jump(0));
            end_jumps.push(j_end);

            let fail_target = self.code.len() as i32;
            for j in fail_jumps {
                // #337: PConstructor patterns now register an
                // unconditional `Op::Jump` for the failure path
                // (alongside the existing `Op::JumpIfNot` from
                // PLiteral / nested constructor tests). Patch
                // either shape.
                match &mut self.code[j] {
                    Op::JumpIfNot(off) => *off = fail_target - (j as i32 + 1),
                    Op::Jump(off)      => *off = fail_target - (j as i32 + 1),
                    _ => {}
                }
            }
            self.next_local = arm_start_locals;
            self.locals = arm_start_locals_map;
        }
        // Fallthrough after every arm failed.
        let panic_msg_idx = self.pool.str("non-exhaustive match");
        self.emit(Op::Panic(panic_msg_idx));

        // Patch every arm's end-jump to land just past the panic.
        let end_target = self.code.len() as i32;
        for j in end_jumps {
            if let Op::Jump(off) = &mut self.code[j] {
                *off = end_target - (j as i32 + 1);
            }
        }
    }
492
    /// Emit the test for one pattern.
    ///
    /// Stack contract: on entry the value under test is on top of the
    /// stack; the emitted code consumes it on every path. Names bound
    /// by `PVar` are allocated as locals and appended to `bindings`.
    /// Returns the code indices of every jump that must be patched to
    /// the arm's fail target (a mix of `JumpIfNot` and, for
    /// constructor tests, unconditional `Jump` — see #337).
    fn compile_pattern_test(&mut self, p: &a::Pattern, bindings: &mut Vec<(String, u16)>) -> Vec<usize> {
        let mut fails = Vec::new();
        match p {
            // Wildcard: always matches; just discard the value.
            a::Pattern::PWild => { self.emit(Op::Pop); }
            // Variable: always matches; bind the value to a fresh local.
            a::Pattern::PVar { name } => {
                let slot = self.alloc_local(name);
                self.emit(Op::StoreLocal(slot));
                bindings.push((name.clone(), slot));
            }
            // Literal: push the constant, compare with the type-appropriate
            // equality op, fail-jump when unequal.
            a::Pattern::PLiteral { value } => {
                self.compile_lit(value);
                match value {
                    a::CLit::Str { .. } => self.emit(Op::StrEq),
                    a::CLit::Bytes { .. } => self.emit(Op::BytesEq),
                    _ => self.emit(Op::NumEq),
                }
                let j = self.code.len();
                self.emit(Op::JumpIfNot(0));
                fails.push(j);
            }
            a::Pattern::PConstructor { name, args } => {
                let name_idx = self.pool.variant(name);
                // #337: the failure path must drop the duplicated
                // scrutinee so subsequent match arms see a clean
                // stack. The previous shape
                //   Dup; TestVariant; JumpIfNot(fail);
                // left `[scrut]` on the stack at the fail target,
                // poisoning later arms — e.g. a wildcard `_` arm
                // whose body referenced an unrelated value would
                // pop the leaked scrutinee instead of its own value.
                //
                // New shape: branch on success, fall through to a
                // failure cleanup that pops the dup'd scrutinee
                // before jumping. The registered fail-jump is an
                // unconditional `Op::Jump`; `compile_match`'s patch
                // loop accepts both `JumpIfNot` and `Jump`.
                self.emit(Op::Dup);                   // [scrut, scrut]
                self.emit(Op::TestVariant(name_idx)); // [scrut, Bool]
                let j_success = self.code.len();
                self.emit(Op::JumpIf(0));             // pop Bool. success → [scrut]
                self.emit(Op::Pop);                   // failure cleanup: [scrut] → []
                let j_fail = self.code.len();
                self.emit(Op::Jump(0));               // → fail target with []
                fails.push(j_fail);
                let success_target = self.code.len() as i32;
                if let Op::JumpIf(off) = &mut self.code[j_success] {
                    *off = success_target - (j_success as i32 + 1);
                }
                // Success path: recurse into payload patterns.
                if args.is_empty() {
                    self.emit(Op::Pop);
                } else if args.len() == 1 {
                    // Single payload: extract in place, no scratch local.
                    self.emit(Op::GetVariantArg(0));
                    let sub_fails = self.compile_pattern_test(&args[0], bindings);
                    fails.extend(sub_fails);
                } else {
                    // Multiple payloads: stash the variant so each arg
                    // can be re-extracted by index.
                    let slot = self.alloc_local("__variant");
                    self.emit(Op::StoreLocal(slot));
                    for (i, arg) in args.iter().enumerate() {
                        self.emit(Op::LoadLocal(slot));
                        self.emit(Op::GetVariantArg(i as u16));
                        let sub_fails = self.compile_pattern_test(arg, bindings);
                        fails.extend(sub_fails);
                    }
                }
            }
            // Record pattern: stash the record, test each listed field.
            a::Pattern::PRecord { fields } => {
                let slot = self.alloc_local("__record");
                self.emit(Op::StoreLocal(slot));
                for f in fields {
                    self.emit(Op::LoadLocal(slot));
                    let fi = self.pool.field(&f.name);
                    self.emit(Op::GetField(fi));
                    let sub_fails = self.compile_pattern_test(&f.pattern, bindings);
                    fails.extend(sub_fails);
                }
            }
            // Tuple pattern: stash the tuple, test each element by index.
            a::Pattern::PTuple { items } => {
                let slot = self.alloc_local("__tuple");
                self.emit(Op::StoreLocal(slot));
                for (i, item) in items.iter().enumerate() {
                    self.emit(Op::LoadLocal(slot));
                    self.emit(Op::GetElem(i as u16));
                    let sub_fails = self.compile_pattern_test(item, bindings);
                    fails.extend(sub_fails);
                }
            }
        }
        fails
    }
582
    /// Compile a Lambda: collect free variables that resolve to outer-scope
    /// locals, register a synthetic function, emit MakeClosure with the
    /// captured values pushed in order.
    ///
    /// The synthetic function's arity is captures + params: the VM
    /// passes the captured values as leading arguments, and the queued
    /// body is compiled with captures occupying the first local slots
    /// (see the lambda loop in `compile_program`).
    fn compile_lambda(&mut self, params: &[a::Param], body: &a::CExpr) {
        // Free vars = vars referenced in body that aren't bound locally.
        // (`free_vars` is defined elsewhere in this crate.)
        let mut bound: std::collections::HashSet<String> = params.iter().map(|p| p.name.clone()).collect();
        let mut frees: Vec<String> = Vec::new();
        free_vars(body, &mut bound, &mut frees);

        // Filter to those that are in the enclosing locals (captures).
        // Don't exclude names that *also* exist in `function_names`:
        // if the name is in `locals`, the local shadows the global
        // within this scope, and the lambda needs to capture the
        // local's value, not the global fn. (#339) Names that are
        // ONLY in `function_names` (no local) stay external — the
        // lambda's body resolves them at call time, same as the
        // enclosing fn would.
        let captures: Vec<String> = frees.into_iter()
            .filter(|n| self.locals.contains_key(n))
            .collect();

        // Allocate a fresh fn_id by appending a placeholder Function.
        let fn_id = self.next_fn_id.len() as u32;
        self.next_fn_id.push(Function {
            name: format!("__lambda_{fn_id}"),
            arity: (captures.len() + params.len()) as u16,
            locals_count: 0,
            code: Vec::new(),
            effects: Vec::new(),
            // See #222: filled in at the end of the compile pass.
            body_hash: crate::program::ZERO_BODY_HASH,
            // Lambdas don't carry refinements at the surface today
            // (closure params don't accept `Type{x | ...}` syntax in
            // the parser). #209 stays focused on top-level fn decls;
            // closure-param refinements are a follow-up.
            refinements: Vec::new(),
        });

        // Emit code at the lambda site: load each captured local, then MakeClosure.
        for c in &captures {
            let slot = *self.locals.get(c).expect("free var must be in scope");
            self.emit(Op::LoadLocal(slot));
        }
        self.emit(Op::MakeClosure { fn_id, capture_count: captures.len() as u16 });

        // Queue the body for later compilation (bodies are compiled
        // after all top-level fns; see compile_program).
        self.pending_lambdas.push(PendingLambda {
            fn_id,
            capture_names: captures,
            params: params.to_vec(),
            body: body.clone(),
        });
    }
636
637    /// Higher-order stdlib ops on Result/Option whose function arg is a
638    /// closure. Emit inline: pattern-match on the variant, invoke the
639    /// closure when applicable, return wrapped result.
640    fn try_emit_higher_order(
641        &mut self,
642        module: &str,
643        op: &str,
644        args: &[a::CExpr],
645        node_id_idx: u32,
646    ) -> bool {
647        match (module, op) {
648            ("result", "map") => self.emit_variant_map(args, "Ok", true),
649            ("result", "and_then") => self.emit_variant_map(args, "Ok", false),
650            ("result", "map_err") => self.emit_variant_map(args, "Err", true),
651            ("result", "or_else") => self.emit_variant_or_else(args, "Err", 1),
652            ("option", "map") => self.emit_variant_map(args, "Some", true),
653            ("option", "and_then") => self.emit_variant_map(args, "Some", false),
654            ("option", "or_else") => self.emit_variant_or_else(args, "None", 0),
655            ("option", "unwrap_or_else") => self.emit_option_unwrap_or_else(args),
656            ("list", "map") => self.emit_list_map(args),
657            ("list", "par_map") => self.emit_list_par_map(args),
658            ("list", "sort_by") => self.emit_list_sort_by(args),
659            ("list", "filter") => self.emit_list_filter(args),
660            ("list", "fold") => self.emit_list_fold(args),
661            ("iter", "from_list") => self.emit_iter_from_list(args),
662            ("iter", "unfold")    => self.emit_iter_unfold(args),
663            ("iter", "next")      => self.emit_iter_next(args),
664            ("iter", "is_empty")  => self.emit_iter_is_empty(args),
665            ("iter", "count")     => self.emit_iter_count(args),
666            ("iter", "take")      => self.emit_iter_take(args),
667            ("iter", "skip")      => self.emit_iter_skip(args),
668            ("iter", "to_list")   => self.emit_iter_to_list(args),
669            ("iter", "map")       => self.emit_iter_map(args),
670            ("iter", "filter")    => self.emit_iter_filter(args),
671            ("iter", "fold")      => self.emit_iter_fold(args),
672            ("map", "fold") => self.emit_map_fold(args, node_id_idx),
673            ("flow", "sequential") => self.emit_flow_sequential(args),
674            ("flow", "branch") => self.emit_flow_branch(args),
675            ("flow", "retry") => self.emit_flow_retry(args),
676            ("flow", "retry_with_backoff") => self.emit_flow_retry_with_backoff(args),
677            ("flow", "parallel") => self.emit_flow_parallel(args),
678            ("flow", "parallel_list") => self.emit_flow_parallel_list(args),
679            _ => return false,
680        }
681        true
682    }
683
    /// `list.map(xs, f)` — inline loop applying `f` to each element.
    /// Stack contract: pushes the resulting List.
    ///
    /// Shape: xs, f, out, i are stashed in fresh locals; a counted loop
    /// appends `f(xs[i])` to `out` until `i == len(xs)`. The loop's
    /// exit `JumpIfNot` is back-patched once the body length is known.
    /// Note the locals are allocated fresh at every `list.map` call
    /// site, so each inlined map costs four frame slots.
    fn emit_list_map(&mut self, args: &[a::CExpr]) {
        // Compile xs and f, store both as locals.
        self.compile_expr(&args[0], false);
        let xs = self.alloc_local("__lm_xs");
        self.emit(Op::StoreLocal(xs));
        self.compile_expr(&args[1], false);
        let f = self.alloc_local("__lm_f");
        self.emit(Op::StoreLocal(f));

        // out := []
        self.emit(Op::MakeList(0));
        let out = self.alloc_local("__lm_out");
        self.emit(Op::StoreLocal(out));

        // i := 0
        let zero = self.pool.int(0);
        self.emit(Op::PushConst(zero));
        let i = self.alloc_local("__lm_i");
        self.emit(Op::StoreLocal(i));

        // loop_top: while i < len(xs) { ... }
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // body: out := out ++ [f(xs[i])]
        // Synthetic node id — the inlined CallClosure has no AST node.
        let nid = self.pool.node_id("n_list_map");
        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // jump back
        let jump_back = self.code.len();
        let back = (loop_top as i32) - (jump_back as i32 + 1);
        self.emit(Op::Jump(back));

        // exit: patch j_exit, push out
        let exit_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_target - (j_exit as i32 + 1);
        }
        self.emit(Op::LoadLocal(out));
    }
745
746    /// `list.par_map(xs, f)` (#305 slice 1). Pushes `xs` and `f`,
747    /// then emits a single `Op::ParallelMap` — the VM applies `f`
748    /// to each element on OS-thread tasks, capped by
749    /// `LEX_PAR_MAX_CONCURRENCY`. Returns the result list in input
750    /// order.
751    fn emit_list_par_map(&mut self, args: &[a::CExpr]) {
752        self.compile_expr(&args[0], false);
753        self.compile_expr(&args[1], false);
754        let nid = self.pool.node_id("n_list_par_map");
755        self.emit(Op::ParallelMap { node_id_idx: nid });
756    }
757
758    /// `list.sort_by(xs, f)` (#338). Pushes `xs` and the key-fn
759    /// `f`, then emits a single `Op::SortByKey` — the VM invokes
760    /// `f` on each element to derive a sortable key, stable-sorts
761    /// by key, and returns the values in sorted order. Keys must
762    /// resolve to `Int` / `Float` / `Str`; mixed-type pairs are
763    /// treated as equal by the comparator (preserving insertion
764    /// order via the stable sort).
765    fn emit_list_sort_by(&mut self, args: &[a::CExpr]) {
766        self.compile_expr(&args[0], false);
767        self.compile_expr(&args[1], false);
768        let nid = self.pool.node_id("n_list_sort_by");
769        self.emit(Op::SortByKey { node_id_idx: nid });
770    }
771
772    /// `list.filter(xs, pred)` — keep elements where pred returns true.
773    fn emit_list_filter(&mut self, args: &[a::CExpr]) {
774        self.compile_expr(&args[0], false);
775        let xs = self.alloc_local("__lf_xs");
776        self.emit(Op::StoreLocal(xs));
777        self.compile_expr(&args[1], false);
778        let f = self.alloc_local("__lf_f");
779        self.emit(Op::StoreLocal(f));
780
781        self.emit(Op::MakeList(0));
782        let out = self.alloc_local("__lf_out");
783        self.emit(Op::StoreLocal(out));
784
785        let zero = self.pool.int(0);
786        self.emit(Op::PushConst(zero));
787        let i = self.alloc_local("__lf_i");
788        self.emit(Op::StoreLocal(i));
789
790        let loop_top = self.code.len();
791        self.emit(Op::LoadLocal(i));
792        self.emit(Op::LoadLocal(xs));
793        self.emit(Op::GetListLen);
794        self.emit(Op::NumLt);
795        let j_exit = self.code.len();
796        self.emit(Op::JumpIfNot(0));
797
798        // x := xs[i]
799        self.emit(Op::LoadLocal(xs));
800        self.emit(Op::LoadLocal(i));
801        self.emit(Op::GetListElemDyn);
802        let x = self.alloc_local("__lf_x");
803        self.emit(Op::StoreLocal(x));
804
805        // if pred(x) { out := out ++ [x] }
806        let nid = self.pool.node_id("n_list_filter");
807        self.emit(Op::LoadLocal(f));
808        self.emit(Op::LoadLocal(x));
809        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
810        let j_skip = self.code.len();
811        self.emit(Op::JumpIfNot(0));
812
813        self.emit(Op::LoadLocal(out));
814        self.emit(Op::LoadLocal(x));
815        self.emit(Op::ListAppend);
816        self.emit(Op::StoreLocal(out));
817
818        let skip_target = self.code.len() as i32;
819        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
820            *off = skip_target - (j_skip as i32 + 1);
821        }
822
823        // i := i + 1
824        self.emit(Op::LoadLocal(i));
825        let one = self.pool.int(1);
826        self.emit(Op::PushConst(one));
827        self.emit(Op::NumAdd);
828        self.emit(Op::StoreLocal(i));
829
830        let jump_back = self.code.len();
831        let back = (loop_top as i32) - (jump_back as i32 + 1);
832        self.emit(Op::Jump(back));
833
834        let exit_target = self.code.len() as i32;
835        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
836            *off = exit_target - (j_exit as i32 + 1);
837        }
838        self.emit(Op::LoadLocal(out));
839    }
840
841    /// `list.fold(xs, init, f)` — left fold with two-arg combiner.
842    fn emit_list_fold(&mut self, args: &[a::CExpr]) {
843        // args: xs, init, f
844        self.compile_expr(&args[0], false);
845        let xs = self.alloc_local("__lo_xs");
846        self.emit(Op::StoreLocal(xs));
847        self.compile_expr(&args[1], false);
848        let acc = self.alloc_local("__lo_acc");
849        self.emit(Op::StoreLocal(acc));
850        self.compile_expr(&args[2], false);
851        let f = self.alloc_local("__lo_f");
852        self.emit(Op::StoreLocal(f));
853
854        let zero = self.pool.int(0);
855        self.emit(Op::PushConst(zero));
856        let i = self.alloc_local("__lo_i");
857        self.emit(Op::StoreLocal(i));
858
859        let loop_top = self.code.len();
860        self.emit(Op::LoadLocal(i));
861        self.emit(Op::LoadLocal(xs));
862        self.emit(Op::GetListLen);
863        self.emit(Op::NumLt);
864        let j_exit = self.code.len();
865        self.emit(Op::JumpIfNot(0));
866
867        // acc := f(acc, xs[i])
868        let nid = self.pool.node_id("n_list_fold");
869        self.emit(Op::LoadLocal(f));
870        self.emit(Op::LoadLocal(acc));
871        self.emit(Op::LoadLocal(xs));
872        self.emit(Op::LoadLocal(i));
873        self.emit(Op::GetListElemDyn);
874        self.emit(Op::CallClosure { arity: 2, node_id_idx: nid });
875        self.emit(Op::StoreLocal(acc));
876
877        // i := i + 1
878        self.emit(Op::LoadLocal(i));
879        let one = self.pool.int(1);
880        self.emit(Op::PushConst(one));
881        self.emit(Op::NumAdd);
882        self.emit(Op::StoreLocal(i));
883
884        let jump_back = self.code.len();
885        let back = (loop_top as i32) - (jump_back as i32 + 1);
886        self.emit(Op::Jump(back));
887
888        let exit_target = self.code.len() as i32;
889        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
890            *off = exit_target - (j_exit as i32 + 1);
891        }
892        self.emit(Op::LoadLocal(acc));
893    }
894
895    // ── Iter[T] operations (#364) ─────────────────────────────────────────
896    // Internal representation: `Value::Variant("__IterEager", [list, idx])`
897    // for the eager form (a List backing store + Int cursor) and
898    // `Value::Variant("__IterLazy", [seed, step_closure])` for the lazy form
899    // produced by `iter.unfold` (#376). Both are tagged variants so each op
900    // can `TestVariant` at runtime to dispatch. The names start with `__` so
901    // they can't be written by user code (uppercase ASCII-letter is required
902    // for constructor names, and the underscores keep them out of the
903    // user-namespace by convention).
904
905    /// `iter.from_list(xs)` — wrap a list in an eager iterator at position 0.
906    fn emit_iter_from_list(&mut self, args: &[a::CExpr]) {
907        self.compile_expr(&args[0], false);
908        let zero = self.pool.int(0);
909        self.emit(Op::PushConst(zero));
910        let v = self.pool.variant("__IterEager");
911        self.emit(Op::MakeVariant { name_idx: v, arity: 2 });
912    }
913
    /// `iter.next(it)` — advance one step; returns `Option[(T, Iter[T])]`.
    ///
    /// Dispatches on the iter's variant tag:
    /// - `__IterLazy(seed, step)` (#376) → invoke `step(seed)`. On
    ///   `Some((t, s'))` wrap as `Some((t, __IterLazy(s', step)))`; on
    ///   `None` propagate `None`. The seed advances forward each call.
    /// - `__IterCursor(handle)` (#379) → effect-call `sql.cursor_next(handle)`
    ///   which returns `Option[T]`. On `Some(row)` wrap as
    ///   `Some((row, __IterCursor(handle)))`; on `None` propagate. Handle
    ///   stays stable across calls — state is server-side / mpsc-buffered.
    /// - `__IterEager(list, idx)` → existing positional cursor.
    ///
    /// Code layout: three straight-line sections (lazy, cursor, eager), each
    /// ending in a forward `Jump(0)` placeholder; all five placeholders are
    /// backpatched to the common convergence point at the end. Jump offsets
    /// are relative to the instruction *after* the jump (hence the `+ 1`).
    fn emit_iter_next(&mut self, args: &[a::CExpr]) {
        // Evaluate the iterator expression and park it in a local so each
        // dispatch arm can re-read it.
        self.compile_expr(&args[0], false);
        let it = self.alloc_local("__in_it");
        self.emit(Op::StoreLocal(it));

        // Dispatch: TestVariant pops; we Dup to keep the iter around.
        // NOTE(review): both dispatch sites below reload `it` from the local
        // instead of consuming the Dup'd copy, which suggests the duplicate
        // may be left on the operand stack — confirm against the VM's
        // `TestVariant` stack contract (`emit_iter_to_list` tests the same
        // tag without a Dup).
        self.emit(Op::LoadLocal(it));
        self.emit(Op::Dup);
        let lazy_name = self.pool.variant("__IterLazy");
        self.emit(Op::TestVariant(lazy_name));
        let j_to_check_cursor = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // ── lazy path ────────────────────────────────────────────────
        // The Dup'd iter is on stack but we've consumed it via TestVariant,
        // so reload from the local.
        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(0)); // seed
        let seed = self.alloc_local("__in_seed");
        self.emit(Op::StoreLocal(seed));

        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(1)); // step closure
        let step = self.alloc_local("__in_step");
        self.emit(Op::StoreLocal(step));

        // Call step(seed) → Option[(T, S)].
        let nid_lazy = self.pool.node_id("n_iter_next_lazy");
        self.emit(Op::LoadLocal(step));
        self.emit(Op::LoadLocal(seed));
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid_lazy });
        let opt = self.alloc_local("__in_opt");
        self.emit(Op::StoreLocal(opt));

        // If `step` returned None, propagate it directly.
        self.emit(Op::LoadLocal(opt));
        let some_name = self.pool.variant("Some");
        self.emit(Op::TestVariant(some_name));
        let j_lazy_none = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some((t, new_seed)) — extract the inner tuple, repackage as
        // Some((t, __IterLazy(new_seed, step))) so the next call advances.
        self.emit(Op::LoadLocal(opt));
        self.emit(Op::GetVariantArg(0));     // (t, new_seed)
        let pair = self.alloc_local("__in_pair");
        self.emit(Op::StoreLocal(pair));

        self.emit(Op::LoadLocal(pair));
        self.emit(Op::GetElem(0));           // t
        self.emit(Op::LoadLocal(pair));
        self.emit(Op::GetElem(1));           // new_seed
        self.emit(Op::LoadLocal(step));      // step closure
        let lazy_v = self.pool.variant("__IterLazy");
        self.emit(Op::MakeVariant { name_idx: lazy_v, arity: 2 }); // __IterLazy(new_seed, step)
        self.emit(Op::MakeTuple(2));         // (t, new_iter)
        let some_v = self.pool.variant("Some");
        self.emit(Op::MakeVariant { name_idx: some_v, arity: 1 });
        let j_after_lazy = self.code.len();
        self.emit(Op::Jump(0));              // placeholder → convergence

        // Lazy → None: just forward the None.
        let none_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_lazy_none] {
            *off = none_t - (j_lazy_none as i32 + 1);
        }
        let none_v = self.pool.variant("None");
        self.emit(Op::MakeVariant { name_idx: none_v, arity: 0 });
        let j_after_lazy_none = self.code.len();
        self.emit(Op::Jump(0));              // placeholder → convergence

        // ── cursor path (#379) ───────────────────────────────────────
        // Backpatch the first dispatch jump to land on the cursor test.
        let cursor_check_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_to_check_cursor] {
            *off = cursor_check_t - (j_to_check_cursor as i32 + 1);
        }

        self.emit(Op::LoadLocal(it));
        self.emit(Op::Dup);
        let cursor_name = self.pool.variant("__IterCursor");
        self.emit(Op::TestVariant(cursor_name));
        let j_to_eager = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Cursor path: extract handle, effect-call sql.cursor_next(handle).
        // The handler returns Option[T] directly. We then wrap as
        // Some((T, __IterCursor(handle))) or forward None.
        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(0));     // handle
        let handle = self.alloc_local("__in_handle");
        self.emit(Op::StoreLocal(handle));

        let kind_idx = self.pool.str("sql");
        let op_idx = self.pool.str("cursor_next");
        let nid_cursor = self.pool.node_id("n_iter_next_cursor");
        self.emit(Op::LoadLocal(handle));
        self.emit(Op::EffectCall {
            kind_idx,
            op_idx,
            arity: 1,
            node_id_idx: nid_cursor,
        });
        let cur_opt = self.alloc_local("__in_cur_opt");
        self.emit(Op::StoreLocal(cur_opt));

        self.emit(Op::LoadLocal(cur_opt));
        let some_c = self.pool.variant("Some");
        self.emit(Op::TestVariant(some_c));
        let j_cursor_none = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some(row): build Some((row, __IterCursor(handle)))
        self.emit(Op::LoadLocal(cur_opt));
        self.emit(Op::GetVariantArg(0));     // row
        self.emit(Op::LoadLocal(handle));
        let cursor_v = self.pool.variant("__IterCursor");
        self.emit(Op::MakeVariant { name_idx: cursor_v, arity: 1 });
        self.emit(Op::MakeTuple(2));         // (row, __IterCursor(handle))
        let some_c2 = self.pool.variant("Some");
        self.emit(Op::MakeVariant { name_idx: some_c2, arity: 1 });
        let j_after_cursor = self.code.len();
        self.emit(Op::Jump(0));              // placeholder → convergence

        // Cursor → None
        let cursor_none_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_cursor_none] {
            *off = cursor_none_t - (j_cursor_none as i32 + 1);
        }
        let none_c = self.pool.variant("None");
        self.emit(Op::MakeVariant { name_idx: none_c, arity: 0 });
        let j_after_cursor_none = self.code.len();
        self.emit(Op::Jump(0));              // placeholder → convergence

        // ── eager path ───────────────────────────────────────────────
        // Neither lazy nor cursor: treat as __IterEager(list, idx).
        let eager_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_to_eager] {
            *off = eager_t - (j_to_eager as i32 + 1);
        }

        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__in_list");
        self.emit(Op::StoreLocal(list));

        self.emit(Op::LoadLocal(it));
        self.emit(Op::GetVariantArg(1));
        let idx = self.alloc_local("__in_idx");
        self.emit(Op::StoreLocal(idx));

        // if idx < len(list)
        self.emit(Op::LoadLocal(idx));
        self.emit(Op::LoadLocal(list));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_eager_else = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some((item, __IterEager(list, idx+1)))
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(idx));
        self.emit(Op::GetListElemDyn);

        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(idx));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        let eager_v = self.pool.variant("__IterEager");
        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
        self.emit(Op::MakeTuple(2));
        let some_e = self.pool.variant("Some");
        self.emit(Op::MakeVariant { name_idx: some_e, arity: 1 });
        let j_after_eager = self.code.len();
        self.emit(Op::Jump(0));              // placeholder → convergence

        // Eager → None (cursor exhausted). Falls through to convergence,
        // so no placeholder jump is needed here.
        let eager_none_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_eager_else] {
            *off = eager_none_t - (j_eager_else as i32 + 1);
        }
        let none_e = self.pool.variant("None");
        self.emit(Op::MakeVariant { name_idx: none_e, arity: 0 });

        // Converge all paths.
        let end = self.code.len() as i32;
        if let Op::Jump(off) = &mut self.code[j_after_lazy] {
            *off = end - (j_after_lazy as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_lazy_none] {
            *off = end - (j_after_lazy_none as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_cursor] {
            *off = end - (j_after_cursor as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_cursor_none] {
            *off = end - (j_after_cursor_none as i32 + 1);
        }
        if let Op::Jump(off) = &mut self.code[j_after_eager] {
            *off = end - (j_after_eager as i32 + 1);
        }
    }
1126
1127    /// `iter.unfold(seed, step)` — lazy iterator that calls `step(seed)` on
1128    /// each `iter.next` and threads the new seed forward. Internal value
1129    /// shape: `__IterLazy(seed, step)`. Step has type `(S) -> Option[(T, S)]`;
1130    /// returning `None` ends the iteration (#376).
1131    fn emit_iter_unfold(&mut self, args: &[a::CExpr]) {
1132        self.compile_expr(&args[0], false); // seed
1133        self.compile_expr(&args[1], false); // step
1134        let lazy = self.pool.variant("__IterLazy");
1135        self.emit(Op::MakeVariant { name_idx: lazy, arity: 2 });
1136    }
1137
1138    /// `iter.is_empty(it)` — true iff no further element. v1 supports the
1139    /// eager form O(1); on a lazy iter the seed sits in slot 0 and is not a
1140    /// List, so the VM trips on `GetListLen` rather than returning a wrong
1141    /// answer. Callers needing lazy support should materialize with
1142    /// `iter.to_list` first or call `iter.next` and pattern-match.
1143    fn emit_iter_is_empty(&mut self, args: &[a::CExpr]) {
1144        self.compile_expr(&args[0], false);
1145        let it = self.alloc_local("__ie_it");
1146        self.emit(Op::StoreLocal(it));
1147
1148        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1)); // idx
1149        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0)); // list
1150        self.emit(Op::GetListLen);                                     // len
1151        self.emit(Op::NumLt);                                          // idx < len
1152        self.emit(Op::BoolNot);                                        // NOT(idx < len)
1153    }
1154
1155    /// `iter.count(it)` — number of remaining elements (v1: eager-only).
1156    fn emit_iter_count(&mut self, args: &[a::CExpr]) {
1157        self.compile_expr(&args[0], false);
1158        let it = self.alloc_local("__ic_it");
1159        self.emit(Op::StoreLocal(it));
1160
1161        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1162        self.emit(Op::GetListLen);                                     // len
1163        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1)); // idx
1164        self.emit(Op::NumSub);                                         // len - idx
1165    }
1166
    /// `iter.take(it, n)` — collect up to n elements, return as new Iter.
    ///
    /// Reads variant slots 0/1, i.e. assumes the eager `__IterEager(list,
    /// idx)` layout — there is no `TestVariant` here, so a lazy iter is not
    /// special-cased (NOTE(review): confirm lazy handling is out of scope).
    /// Copies at most `n` elements starting at the current cursor into a
    /// fresh list, then wraps it as `__IterEager(out, 0)`. Both loop-exit
    /// jumps are backpatched to the same target; offsets are relative to the
    /// instruction after the jump.
    fn emit_iter_take(&mut self, args: &[a::CExpr]) {
        self.compile_expr(&args[0], false);
        let it   = self.alloc_local("__itk_it");
        self.emit(Op::StoreLocal(it));

        self.compile_expr(&args[1], false);
        let n    = self.alloc_local("__itk_n");
        self.emit(Op::StoreLocal(n));

        // Unpack: backing list (slot 0) and starting cursor (slot 1).
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__itk_list");
        self.emit(Op::StoreLocal(list));

        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let i    = self.alloc_local("__itk_i");
        self.emit(Op::StoreLocal(i));

        // out := []; cnt := 0 (cnt counts how many we have taken so far).
        self.emit(Op::MakeList(0));
        let out  = self.alloc_local("__itk_out");
        self.emit(Op::StoreLocal(out));

        let zero = self.pool.int(0);
        self.emit(Op::PushConst(zero));
        let cnt  = self.alloc_local("__itk_cnt");
        self.emit(Op::StoreLocal(cnt));

        let loop_top = self.code.len();

        // while cnt < n
        self.emit(Op::LoadLocal(cnt));
        self.emit(Op::LoadLocal(n));
        self.emit(Op::NumLt);
        let j_exit_n = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // AND i < len(list)
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit_l = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // out = out ++ [list[i]]
        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        let one = self.pool.int(1);
        // i = i + 1
        self.emit(Op::LoadLocal(i));
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));
        // cnt = cnt + 1
        self.emit(Op::LoadLocal(cnt));
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(cnt));

        // Unconditional jump back to the double-condition header.
        let jback = self.code.len();
        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));

        // Both exit jumps (cnt >= n, or list exhausted) land here.
        let exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit_n] { *off = exit_t - (j_exit_n as i32 + 1); }
        if let Op::JumpIfNot(off) = &mut self.code[j_exit_l] { *off = exit_t - (j_exit_l as i32 + 1); }

        // return new __IterEager(out, 0)
        self.emit(Op::LoadLocal(out));
        self.emit(Op::PushConst(zero));
        let eager_v = self.pool.variant("__IterEager");
        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
    }
1243
1244    /// `iter.skip(it, n)` — advance cursor by n (or to end), return new Iter.
1245    fn emit_iter_skip(&mut self, args: &[a::CExpr]) {
1246        self.compile_expr(&args[0], false);
1247        let it   = self.alloc_local("__isk_it");
1248        self.emit(Op::StoreLocal(it));
1249
1250        self.compile_expr(&args[1], false);
1251        let n    = self.alloc_local("__isk_n");
1252        self.emit(Op::StoreLocal(n));
1253
1254        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1255        let list = self.alloc_local("__isk_list");
1256        self.emit(Op::StoreLocal(list));
1257
1258        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
1259        let idx  = self.alloc_local("__isk_idx");
1260        self.emit(Op::StoreLocal(idx));
1261
1262        // raw = idx + n
1263        self.emit(Op::LoadLocal(idx));
1264        self.emit(Op::LoadLocal(n));
1265        self.emit(Op::NumAdd);
1266        let raw  = self.alloc_local("__isk_raw");
1267        self.emit(Op::StoreLocal(raw));
1268
1269        // new_idx = if raw < len then raw else len
1270        self.emit(Op::LoadLocal(raw));
1271        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1272        self.emit(Op::NumLt);
1273        let j_use_raw = self.code.len();
1274        self.emit(Op::JumpIf(0));
1275
1276        // use len
1277        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1278        let j_end = self.code.len();
1279        self.emit(Op::Jump(0));
1280
1281        // use raw
1282        let raw_t = self.code.len() as i32;
1283        if let Op::JumpIf(off) = &mut self.code[j_use_raw] { *off = raw_t - (j_use_raw as i32 + 1); }
1284        self.emit(Op::LoadLocal(raw));
1285
1286        let end_t = self.code.len() as i32;
1287        if let Op::Jump(off) = &mut self.code[j_end] { *off = end_t - (j_end as i32 + 1); }
1288
1289        // new_idx on stack; build new __IterEager(list, new_idx)
1290        let new_idx = self.alloc_local("__isk_ni");
1291        self.emit(Op::StoreLocal(new_idx));
1292        self.emit(Op::LoadLocal(list));
1293        self.emit(Op::LoadLocal(new_idx));
1294        let eager_v = self.pool.variant("__IterEager");
1295        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
1296    }
1297
    /// `iter.to_list(it)` — materialise remaining elements into a List.
    ///
    /// Dispatches on the iter variant (#376):
    /// - `__IterLazy`: repeatedly call `step(seed)`; on `Some((t, s'))` append
    ///   `t` and continue with `s'`; on `None` stop. May hang on truly
    ///   infinite producers — that's documented as a v1 limitation, the
    ///   step-limit-protected caller is what catches misuse.
    /// - `__IterEager`: slice the backing list from `idx` onward (O(n) walk).
    ///
    /// NOTE(review): there is no `__IterCursor` arm here — a cursor iter
    /// would fall into the eager path and hit `GetListLen` on the handle;
    /// confirm whether cursor iters are expected to reach `to_list`.
    fn emit_iter_to_list(&mut self, args: &[a::CExpr]) {
        self.compile_expr(&args[0], false);
        let it = self.alloc_local("__itl_it");
        self.emit(Op::StoreLocal(it));

        // Build the output list up-front, shared across both paths.
        self.emit(Op::MakeList(0));
        let out = self.alloc_local("__itl_out");
        self.emit(Op::StoreLocal(out));

        // Dispatch on variant tag.
        self.emit(Op::LoadLocal(it));
        let lazy_name = self.pool.variant("__IterLazy");
        self.emit(Op::TestVariant(lazy_name));
        let j_to_eager = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // ── lazy path ─────────────────────────────────────────────────
        // seed and step closure live in locals; we update seed each iteration.
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let seed = self.alloc_local("__itl_seed");
        self.emit(Op::StoreLocal(seed));

        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let step = self.alloc_local("__itl_step");
        self.emit(Op::StoreLocal(step));

        // Loop: opt := step(seed).
        let lazy_loop = self.code.len();
        let nid_lazy = self.pool.node_id("n_iter_to_list_lazy");
        self.emit(Op::LoadLocal(step));
        self.emit(Op::LoadLocal(seed));
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid_lazy });
        let opt = self.alloc_local("__itl_opt");
        self.emit(Op::StoreLocal(opt));

        // If None, drop out of the lazy loop.
        self.emit(Op::LoadLocal(opt));
        let some_name = self.pool.variant("Some");
        self.emit(Op::TestVariant(some_name));
        let j_lazy_exit = self.code.len();
        self.emit(Op::JumpIfNot(0));

        // Some((t, new_seed)): append t to out, replace seed.
        self.emit(Op::LoadLocal(opt));
        self.emit(Op::GetVariantArg(0));
        let pair = self.alloc_local("__itl_pair");
        self.emit(Op::StoreLocal(pair));

        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(pair)); self.emit(Op::GetElem(0));
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        self.emit(Op::LoadLocal(pair)); self.emit(Op::GetElem(1));
        self.emit(Op::StoreLocal(seed));

        // Back to the top of the lazy loop.
        let jback_lazy = self.code.len();
        self.emit(Op::Jump((lazy_loop as i32) - (jback_lazy as i32 + 1)));

        // Lazy loop exit: skip over the eager path to the convergence point.
        let lazy_exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_lazy_exit] {
            *off = lazy_exit_t - (j_lazy_exit as i32 + 1);
        }
        let j_after_lazy = self.code.len();
        self.emit(Op::Jump(0));

        // ── eager path ────────────────────────────────────────────────
        let eager_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_to_eager] {
            *off = eager_t - (j_to_eager as i32 + 1);
        }

        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__itl_list");
        self.emit(Op::StoreLocal(list));

        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let i = self.alloc_local("__itl_i");
        self.emit(Op::StoreLocal(i));

        // while i < len(list): out := out ++ [list[i]]; i := i + 1.
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0));

        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        let jback = self.code.len();
        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));

        let exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_t - (j_exit as i32 + 1);
        }

        // Converge: lazy path falls through here too.
        let converge = self.code.len() as i32;
        if let Op::Jump(off) = &mut self.code[j_after_lazy] {
            *off = converge - (j_after_lazy as i32 + 1);
        }
        self.emit(Op::LoadLocal(out));
    }
1421
    /// `iter.map(it, f)` — apply `f` to each remaining element; returns new Iter.
    ///
    /// Expects `it` in eager form: variant arg 0 is the backing list,
    /// arg 1 the cursor (may be > 0 if the iter was partially consumed).
    /// NOTE(review): unlike the collect path above, there is no lazy-iter
    /// handling here — presumably lazy iters are normalized before this
    /// point; confirm against callers.
    ///
    /// Emitted shape: inline `while i < len(list)` appending `f(list[i])`
    /// to a fresh list, then re-wrapping the result as `__IterEager(out, 0)`.
    fn emit_iter_map(&mut self, args: &[a::CExpr]) {
        // it := <iterator value>
        self.compile_expr(&args[0], false);
        let it   = self.alloc_local("__im_it");
        self.emit(Op::StoreLocal(it));

        // f := <closure>
        self.compile_expr(&args[1], false);
        let f    = self.alloc_local("__im_f");
        self.emit(Op::StoreLocal(f));

        // list := it.0 (backing list)
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__im_list");
        self.emit(Op::StoreLocal(list));

        // i := it.1 (cursor)
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let i    = self.alloc_local("__im_i");
        self.emit(Op::StoreLocal(i));

        // out := []
        self.emit(Op::MakeList(0));
        let out  = self.alloc_local("__im_out");
        self.emit(Op::StoreLocal(out));

        // loop_top: while i < len(list)
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0)); // offset patched once the exit pc is known

        // out := out ++ [f(list[i])]
        let nid = self.pool.node_id("n_iter_map");
        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // Back-edge to the condition. Jump offsets are relative to the
        // *next* pc, hence the `+ 1` in every offset computation here.
        let jback = self.code.len();
        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));

        // Patch the loop-exit jump now that its target is known.
        let exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] { *off = exit_t - (j_exit as i32 + 1); }

        // Result: __IterEager(out, 0) — fresh iterator over the mapped list.
        let zero = self.pool.int(0);
        self.emit(Op::LoadLocal(out));
        self.emit(Op::PushConst(zero));
        let eager_v = self.pool.variant("__IterEager");
        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
    }
1479
    /// `iter.filter(it, pred)` — keep elements where pred is true; returns new Iter.
    ///
    /// Expects `it` in eager form: variant arg 0 is the backing list,
    /// arg 1 the cursor. NOTE(review): no lazy-iter handling here —
    /// presumably lazy iters are normalized upstream; confirm.
    ///
    /// Emitted shape: inline `while i < len(list)` loop; each element is
    /// stashed in a temp local so it can be both tested and appended,
    /// then the survivors are re-wrapped as `__IterEager(out, 0)`.
    fn emit_iter_filter(&mut self, args: &[a::CExpr]) {
        // it := <iterator value>
        self.compile_expr(&args[0], false);
        let it   = self.alloc_local("__if_it");
        self.emit(Op::StoreLocal(it));

        // pred := <closure>
        self.compile_expr(&args[1], false);
        let f    = self.alloc_local("__if_f");
        self.emit(Op::StoreLocal(f));

        // list := it.0 (backing list)
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
        let list = self.alloc_local("__if_list");
        self.emit(Op::StoreLocal(list));

        // i := it.1 (cursor)
        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
        let i    = self.alloc_local("__if_i");
        self.emit(Op::StoreLocal(i));

        // out := []
        self.emit(Op::MakeList(0));
        let out  = self.alloc_local("__if_out");
        self.emit(Op::StoreLocal(out));

        // loop_top: while i < len(list)
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0)); // patched below once the exit pc is known

        // elem := list[i]
        self.emit(Op::LoadLocal(list));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        let x    = self.alloc_local("__if_x");
        self.emit(Op::StoreLocal(x));

        // if pred(elem) { out := out ++ [elem] }
        let nid = self.pool.node_id("n_iter_filter");
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(x));
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
        let j_skip = self.code.len();
        self.emit(Op::JumpIfNot(0)); // patched: jump over the append

        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(x));
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // Patch the skip jump to land just past the append.
        let skip_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_skip] { *off = skip_t - (j_skip as i32 + 1); }

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // Back-edge to the condition; offsets are relative to the next pc.
        let jback = self.code.len();
        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));

        // Patch the loop-exit jump now that its target is known.
        let exit_t = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] { *off = exit_t - (j_exit as i32 + 1); }

        // Result: __IterEager(out, 0) — fresh iterator over the kept elements.
        let zero = self.pool.int(0);
        self.emit(Op::LoadLocal(out));
        self.emit(Op::PushConst(zero));
        let eager_v = self.pool.variant("__IterEager");
        self.emit(Op::MakeVariant { name_idx: eager_v, arity: 2 });
    }
1549
1550    /// `iter.fold(it, init, f)` — left fold over remaining elements.
1551    fn emit_iter_fold(&mut self, args: &[a::CExpr]) {
1552        self.compile_expr(&args[0], false);
1553        let it   = self.alloc_local("__ifo_it");
1554        self.emit(Op::StoreLocal(it));
1555
1556        self.compile_expr(&args[1], false);
1557        let acc  = self.alloc_local("__ifo_acc");
1558        self.emit(Op::StoreLocal(acc));
1559
1560        self.compile_expr(&args[2], false);
1561        let f    = self.alloc_local("__ifo_f");
1562        self.emit(Op::StoreLocal(f));
1563
1564        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(0));
1565        let list = self.alloc_local("__ifo_list");
1566        self.emit(Op::StoreLocal(list));
1567
1568        self.emit(Op::LoadLocal(it)); self.emit(Op::GetVariantArg(1));
1569        let i    = self.alloc_local("__ifo_i");
1570        self.emit(Op::StoreLocal(i));
1571
1572        let loop_top = self.code.len();
1573        self.emit(Op::LoadLocal(i));
1574        self.emit(Op::LoadLocal(list)); self.emit(Op::GetListLen);
1575        self.emit(Op::NumLt);
1576        let j_exit = self.code.len();
1577        self.emit(Op::JumpIfNot(0));
1578
1579        let nid = self.pool.node_id("n_iter_fold");
1580        self.emit(Op::LoadLocal(f));
1581        self.emit(Op::LoadLocal(acc));
1582        self.emit(Op::LoadLocal(list));
1583        self.emit(Op::LoadLocal(i));
1584        self.emit(Op::GetListElemDyn);
1585        self.emit(Op::CallClosure { arity: 2, node_id_idx: nid });
1586        self.emit(Op::StoreLocal(acc));
1587
1588        self.emit(Op::LoadLocal(i));
1589        let one = self.pool.int(1);
1590        self.emit(Op::PushConst(one));
1591        self.emit(Op::NumAdd);
1592        self.emit(Op::StoreLocal(i));
1593
1594        let jback = self.code.len();
1595        self.emit(Op::Jump((loop_top as i32) - (jback as i32 + 1)));
1596
1597        let exit_t = self.code.len() as i32;
1598        if let Op::JumpIfNot(off) = &mut self.code[j_exit] { *off = exit_t - (j_exit as i32 + 1); }
1599        self.emit(Op::LoadLocal(acc));
1600    }
1601
    /// `map.fold(m, init, f)` — left fold over `Map[K, V]` entries with a
    /// three-arg combiner `f(acc, k, v)`. Iteration order matches
    /// `map.entries` (BTreeMap-sorted by key). Materializes the entry
    /// list once via the runtime's `("map", "entries")` op, then runs
    /// the same inline loop as `list.fold`.
    ///
    /// `node_id_idx` attributes the `EffectCall` (the entries fetch) to
    /// the original call site; the closure call inside the loop uses its
    /// own `n_map_fold` node id.
    fn emit_map_fold(&mut self, args: &[a::CExpr], node_id_idx: u32) {
        // xs := map.entries(m)
        self.compile_expr(&args[0], false);
        let map_kind = self.pool.str("map");
        let entries_op = self.pool.str("entries");
        self.emit(Op::EffectCall {
            kind_idx: map_kind,
            op_idx: entries_op,
            arity: 1,
            node_id_idx,
        });
        let xs = self.alloc_local("__mf_xs");
        self.emit(Op::StoreLocal(xs));

        // acc := init
        self.compile_expr(&args[1], false);
        let acc = self.alloc_local("__mf_acc");
        self.emit(Op::StoreLocal(acc));

        // f := <closure>
        self.compile_expr(&args[2], false);
        let f = self.alloc_local("__mf_f");
        self.emit(Op::StoreLocal(f));

        // i := 0
        let zero = self.pool.int(0);
        self.emit(Op::PushConst(zero));
        let i = self.alloc_local("__mf_i");
        self.emit(Op::StoreLocal(i));

        // loop_top: while i < len(xs)
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0)); // patched below once the exit pc is known

        // pair := xs[i]
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        let pair = self.alloc_local("__mf_pair");
        self.emit(Op::StoreLocal(pair));

        // acc := f(acc, pair.0, pair.1)
        // (each entry is a (k, v) pair per the `map.entries` contract above)
        let nid = self.pool.node_id("n_map_fold");
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(acc));
        self.emit(Op::LoadLocal(pair));
        self.emit(Op::GetElem(0));
        self.emit(Op::LoadLocal(pair));
        self.emit(Op::GetElem(1));
        self.emit(Op::CallClosure { arity: 3, node_id_idx: nid });
        self.emit(Op::StoreLocal(acc));

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // Back-edge; jump offsets are relative to the next pc, hence `+ 1`.
        let jump_back = self.code.len();
        let back = (loop_top as i32) - (jump_back as i32 + 1);
        self.emit(Op::Jump(back));

        // exit: patch the loop-exit jump, then leave acc as the result.
        let exit_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_target - (j_exit as i32 + 1);
        }
        self.emit(Op::LoadLocal(acc));
    }
1681
1682    /// Inline pattern: `<module>.map(v, f)` and friends.
1683    /// `wrap_with`: variant tag whose payload triggers the call (Ok / Some / Err).
1684    /// `wrap_result`: if true, wrap the closure's result back in `wrap_with`
1685    /// (map shape); if false, expect the closure to return a wrapped value
1686    /// itself (and_then shape).
1687    fn emit_variant_map(
1688        &mut self,
1689        args: &[a::CExpr],
1690        wrap_with: &str,
1691        wrap_result: bool,
1692    ) {
1693        // args[0] = the wrapped value (Result/Option), args[1] = closure
1694        let wrap_idx = self.pool.variant(wrap_with);
1695
1696        // Compile and store the value into a local, evaluate closure on top of stack.
1697        self.compile_expr(&args[0], false);
1698        let val_slot = self.alloc_local("__hov");
1699        self.emit(Op::StoreLocal(val_slot));
1700
1701        self.compile_expr(&args[1], false);
1702        let f_slot = self.alloc_local("__hof");
1703        self.emit(Op::StoreLocal(f_slot));
1704
1705        // Stack discipline:
1706        //   load val ⇒ [v]
1707        //   dup     ⇒ [v, v]
1708        //   test    ⇒ [v, Bool]
1709        //   jumpifnot ⇒ [v]
1710        // Both branches end with [v] before the branch body.
1711        self.emit(Op::LoadLocal(val_slot));
1712        self.emit(Op::Dup);
1713        self.emit(Op::TestVariant(wrap_idx));
1714        let j_skip = self.code.len();
1715        self.emit(Op::JumpIfNot(0));
1716
1717        // Matched arm: extract payload, call closure on it.
1718        self.emit(Op::GetVariantArg(0));
1719        let arg_slot = self.alloc_local("__hov_arg");
1720        self.emit(Op::StoreLocal(arg_slot));
1721        self.emit(Op::LoadLocal(f_slot));
1722        self.emit(Op::LoadLocal(arg_slot));
1723        let nid = self.pool.node_id("n_hov");
1724        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
1725        if wrap_result {
1726            self.emit(Op::MakeVariant { name_idx: wrap_idx, arity: 1 });
1727        }
1728        let j_end = self.code.len();
1729        self.emit(Op::Jump(0));
1730
1731        // Skip arm: stack already has [v] from the failed Dup; nothing to do.
1732        let skip_target = self.code.len() as i32;
1733        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
1734            *off = skip_target - (j_skip as i32 + 1);
1735        }
1736
1737        let end_target = self.code.len() as i32;
1738        if let Op::Jump(off) = &mut self.code[j_end] {
1739            *off = end_target - (j_end as i32 + 1);
1740        }
1741    }
1742
1743    /// Sibling of `emit_variant_map` for the recovery combinators
1744    /// `result.or_else` and `option.or_else`. Differences from
1745    /// `emit_variant_map`:
1746    ///   - matches on the *negative* variant (`Err` / `None`)
1747    ///   - the closure's result becomes the call's result directly,
1748    ///     with no wrapping (it is itself a `Result` / `Option`)
1749    ///   - `option.or_else`'s closure takes zero args (`None` has no
1750    ///     payload to forward)
1751    fn emit_variant_or_else(
1752        &mut self,
1753        args: &[a::CExpr],
1754        match_on: &str,
1755        closure_arity: u16,
1756    ) {
1757        let match_idx = self.pool.variant(match_on);
1758
1759        self.compile_expr(&args[0], false);
1760        let val_slot = self.alloc_local("__hoe");
1761        self.emit(Op::StoreLocal(val_slot));
1762
1763        self.compile_expr(&args[1], false);
1764        let f_slot = self.alloc_local("__hoe_f");
1765        self.emit(Op::StoreLocal(f_slot));
1766
1767        // Stack discipline mirrors emit_variant_map:
1768        //   load val      ⇒ [v]
1769        //   dup           ⇒ [v, v]
1770        //   test          ⇒ [v, Bool]
1771        //   jumpifnot     ⇒ [v]
1772        // The unmatched arm leaves [v] (Ok/Some unchanged); the
1773        // matched arm pops [v] and pushes the closure's result.
1774        self.emit(Op::LoadLocal(val_slot));
1775        self.emit(Op::Dup);
1776        self.emit(Op::TestVariant(match_idx));
1777        let j_skip = self.code.len();
1778        self.emit(Op::JumpIfNot(0));
1779
1780        // Matched arm: pop the duplicate left on the stack,
1781        // then call the closure with whatever payload it expects.
1782        self.emit(Op::Pop);
1783        self.emit(Op::LoadLocal(f_slot));
1784        if closure_arity == 1 {
1785            self.emit(Op::LoadLocal(val_slot));
1786            self.emit(Op::GetVariantArg(0));
1787        }
1788        let nid = self.pool.node_id("n_hoe");
1789        self.emit(Op::CallClosure { arity: closure_arity, node_id_idx: nid });
1790
1791        let j_end = self.code.len();
1792        self.emit(Op::Jump(0));
1793
1794        // Unmatched arm: stack already holds [v]; nothing to do.
1795        let skip_target = self.code.len() as i32;
1796        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
1797            *off = skip_target - (j_skip as i32 + 1);
1798        }
1799
1800        let end_target = self.code.len() as i32;
1801        if let Op::Jump(off) = &mut self.code[j_end] {
1802            *off = end_target - (j_end as i32 + 1);
1803        }
1804    }
1805
1806    /// `option.unwrap_or_else(opt, f)` — lazy default via zero-arg thunk.
1807    ///   Some(x) → x          (unwrap; no wrapping)
1808    ///   None    → f()        (call thunk; return its result directly)
1809    fn emit_option_unwrap_or_else(&mut self, args: &[a::CExpr]) {
1810        let some_idx = self.pool.variant("Some");
1811
1812        // Compile opt and f; stash both so they're accessible on both arms.
1813        self.compile_expr(&args[0], false);
1814        let val_slot = self.alloc_local("__uoe_val");
1815        self.emit(Op::StoreLocal(val_slot));
1816
1817        self.compile_expr(&args[1], false);
1818        let f_slot = self.alloc_local("__uoe_f");
1819        self.emit(Op::StoreLocal(f_slot));
1820
1821        // Test whether opt is Some.
1822        //   load val ⇒ [v]
1823        //   dup      ⇒ [v, v]
1824        //   test     ⇒ [v, Bool]
1825        //   jumpifnot → None arm
1826        self.emit(Op::LoadLocal(val_slot));
1827        self.emit(Op::Dup);
1828        self.emit(Op::TestVariant(some_idx));
1829        let j_none = self.code.len();
1830        self.emit(Op::JumpIfNot(0));
1831
1832        // Some arm: extract the payload from [v] left on the stack.
1833        self.emit(Op::GetVariantArg(0));
1834        let j_end = self.code.len();
1835        self.emit(Op::Jump(0));
1836
1837        // None arm: pop the [v] duplicate, call the thunk.
1838        let none_target = self.code.len() as i32;
1839        if let Op::JumpIfNot(off) = &mut self.code[j_none] {
1840            *off = none_target - (j_none as i32 + 1);
1841        }
1842        self.emit(Op::Pop);
1843        self.emit(Op::LoadLocal(f_slot));
1844        let nid = self.pool.node_id("n_uoe");
1845        self.emit(Op::CallClosure { arity: 0, node_id_idx: nid });
1846
1847        // Patch jump-to-end from Some arm.
1848        let end_target = self.code.len() as i32;
1849        if let Op::Jump(off) = &mut self.code[j_end] {
1850            *off = end_target - (j_end as i32 + 1);
1851        }
1852    }
1853
1854    // ---- std.flow trampolines ----------------------------------------
1855    //
1856    // Each flow.<op>(c1, c2, ...) call site:
1857    //   1. compiles its closure args and leaves them on the stack
1858    //   2. registers a fresh "trampoline" Function whose body invokes
1859    //      those captured closures appropriately
1860    //   3. emits MakeClosure { fn_id: trampoline, capture_count: N }
1861    //
1862    // The trampoline's parameter layout is [capture_0, ..., capture_{N-1},
1863    // arg_0, ...]: captures first, the closure's own args after.
1864
1865    /// Allocate a fresh fn_id for a trampoline and install its bytecode.
1866    /// Trampolines are the one Function-creation path that already has
1867    /// the body in hand at install time (top-level fns and lambdas have
1868    /// it filled in later), so we compute `body_hash` immediately. The
1869    /// final hash pass at the end of `compile_program` is a no-op here.
1870    fn install_trampoline(&mut self, name: &str, arity: u16, locals_count: u16, code: Vec<Op>) -> u32 {
1871        let fn_id = self.next_fn_id.len() as u32;
1872        let body_hash = crate::program::compute_body_hash(arity, locals_count, &code);
1873        self.next_fn_id.push(Function {
1874            name: name.into(),
1875            arity,
1876            locals_count,
1877            code,
1878            effects: Vec::new(),
1879            body_hash,
1880            // Trampolines (flow.sequential / parallel / etc.) don't
1881            // surface refined params at this layer.
1882            refinements: Vec::new(),
1883        });
1884        fn_id
1885    }
1886
1887    /// `flow.sequential(f, g)` returns a closure `(x) -> g(f(x))`.
1888    fn emit_flow_sequential(&mut self, args: &[a::CExpr]) {
1889        // Push f, g; build the trampoline closure with 2 captures.
1890        self.compile_expr(&args[0], false);
1891        self.compile_expr(&args[1], false);
1892        let nid = self.pool.node_id("n_flow_sequential");
1893        let code = vec![
1894            // Locals: [f=0, g=1, x=2]
1895            Op::LoadLocal(0),                                  // push f
1896            Op::LoadLocal(2),                                  // push x
1897            Op::CallClosure { arity: 1, node_id_idx: nid },    // r = f(x)
1898            // stack: [r]
1899            Op::StoreLocal(3),                                 // tmp = r
1900            Op::LoadLocal(1),                                  // push g
1901            Op::LoadLocal(3),                                  // push tmp
1902            Op::CallClosure { arity: 1, node_id_idx: nid },    // r = g(tmp)
1903            Op::Return,
1904        ];
1905        let fn_id = self.install_trampoline("__flow_sequential", 3, 4, code);
1906        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
1907    }
1908
1909    /// `flow.parallel(fa, fb)` returns a closure `() -> (fa(), fb())`.
1910    /// Implementation is sequential: each function is called in order
1911    /// and the results are packed into a 2-tuple. The spec (§11.2)
1912    /// allows the runtime to apply true parallelism here; that needs
1913    /// a thread-safe handler split and is left to a follow-up. The
1914    /// signature is what users program against — sequential vs threaded
1915    /// is an implementation detail invisible to the type system.
1916    fn emit_flow_parallel(&mut self, args: &[a::CExpr]) {
1917        // Push fa, fb; build a 0-arg trampoline closure with 2 captures.
1918        self.compile_expr(&args[0], false);
1919        self.compile_expr(&args[1], false);
1920        let nid = self.pool.node_id("n_flow_parallel");
1921        let code = vec![
1922            // Locals: [fa=0, fb=1]
1923            Op::LoadLocal(0),                                  // push fa
1924            Op::CallClosure { arity: 0, node_id_idx: nid },    // a = fa()
1925            Op::LoadLocal(1),                                  // push fb
1926            Op::CallClosure { arity: 0, node_id_idx: nid },    // b = fb()
1927            Op::MakeTuple(2),                                  // (a, b)
1928            Op::Return,
1929        ];
1930        let fn_id = self.install_trampoline("__flow_parallel", 2, 2, code);
1931        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
1932    }
1933
    /// `flow.parallel_list(actions)` runs each 0-arg closure in `actions`
    /// and returns the results as a list in input order. Variadic
    /// counterpart to `flow.parallel`. Sequential under the hood — the
    /// spec (§11.2) reserves true threading for a future scheduler.
    /// Compiled inline (mirrors `list.map`) so closure args can flow
    /// through `CallClosure` without a heap-allocated trampoline.
    fn emit_flow_parallel_list(&mut self, args: &[a::CExpr]) {
        // xs := actions
        self.compile_expr(&args[0], false);
        let xs = self.alloc_local("__fpl_xs");
        self.emit(Op::StoreLocal(xs));

        // out := []
        self.emit(Op::MakeList(0));
        let out = self.alloc_local("__fpl_out");
        self.emit(Op::StoreLocal(out));

        // i := 0
        let zero = self.pool.int(0);
        self.emit(Op::PushConst(zero));
        let i = self.alloc_local("__fpl_i");
        self.emit(Op::StoreLocal(i));

        // loop_top: while i < len(xs) { ... }
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0)); // offset patched once the exit pc is known

        // body: out := out ++ [xs[i]()]
        let nid = self.pool.node_id("n_flow_parallel_list");
        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::CallClosure { arity: 0, node_id_idx: nid });
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // jump back (offsets are relative to the next pc, hence `+ 1`)
        let jump_back = self.code.len();
        let back = (loop_top as i32) - (jump_back as i32 + 1);
        self.emit(Op::Jump(back));

        // exit: patch j_exit, push out
        let exit_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_target - (j_exit as i32 + 1);
        }
        self.emit(Op::LoadLocal(out));
    }
1995
    /// `flow.branch(cond, t, f)` returns a closure `(x) -> if cond(x) then t(x) else f(x)`.
    ///
    /// Built as a trampoline: the three closures become captures
    /// (locals 0–2), the call-time arg is local 3. Jump offsets inside
    /// the trampoline body are patched against `code` indices, which are
    /// local to this vec (not `self.code`).
    fn emit_flow_branch(&mut self, args: &[a::CExpr]) {
        // Push captures cond, t, f for MakeClosure below.
        self.compile_expr(&args[0], false);
        self.compile_expr(&args[1], false);
        self.compile_expr(&args[2], false);
        let nid = self.pool.node_id("n_flow_branch");
        let mut code = vec![
            // Locals: [cond=0, t=1, f=2, x=3]
            Op::LoadLocal(0),                               // push cond
            Op::LoadLocal(3),                               // push x
            Op::CallClosure { arity: 1, node_id_idx: nid }, // bool
        ];
        let j_false = code.len();
        code.push(Op::JumpIfNot(0));                        // patched
        // true arm: t(x)
        code.push(Op::LoadLocal(1));
        code.push(Op::LoadLocal(3));
        code.push(Op::CallClosure { arity: 1, node_id_idx: nid });
        code.push(Op::Return);
        // false arm: f(x). Each arm Returns, so no join point is needed.
        let false_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_false] {
            *off = false_target - (j_false as i32 + 1);
        }
        code.push(Op::LoadLocal(2));
        code.push(Op::LoadLocal(3));
        code.push(Op::CallClosure { arity: 1, node_id_idx: nid });
        code.push(Op::Return);

        let fn_id = self.install_trampoline("__flow_branch", 4, 4, code);
        self.emit(Op::MakeClosure { fn_id, capture_count: 3 });
    }
2028
    /// `flow.retry(f, max_attempts)` returns a closure `(x) -> Result[U, E]`
    /// that calls `f(x)` up to `max_attempts` times, returning the first
    /// `Ok` or the final `Err`.
    ///
    /// Built as a trampoline with captures f and max (locals 0–1), the
    /// call-time arg x (local 2), and working locals i and last (3–4).
    /// Jump offsets are patched against indices into the local `code`
    /// vec, not `self.code`.
    fn emit_flow_retry(&mut self, args: &[a::CExpr]) {
        // Push captures f, max for MakeClosure below.
        self.compile_expr(&args[0], false);
        self.compile_expr(&args[1], false);
        let call_nid = self.pool.node_id("n_flow_retry");
        let ok_idx = self.pool.variant("Ok");
        let zero_const = self.pool.int(0);
        let one_const = self.pool.int(1);
        // Locals: [f=0, max=1, x=2, i=3, last=4]
        let mut code = vec![
            // i := 0
            Op::PushConst(zero_const),
            Op::StoreLocal(3),
        ];
        // loop_top: while i < max
        let loop_top = code.len() as i32;
        code.push(Op::LoadLocal(3));
        code.push(Op::LoadLocal(1));
        code.push(Op::NumLt);
        let j_done = code.len();
        code.push(Op::JumpIfNot(0));                       // patched

        // body: r := f(x); last := r
        code.push(Op::LoadLocal(0));
        code.push(Op::LoadLocal(2));
        code.push(Op::CallClosure { arity: 1, node_id_idx: call_nid });
        code.push(Op::StoreLocal(4));

        // Test variant Ok on last; if so, return last.
        code.push(Op::LoadLocal(4));
        code.push(Op::TestVariant(ok_idx));
        let j_was_err = code.len();
        code.push(Op::JumpIfNot(0));                       // patched: skip return
        code.push(Op::LoadLocal(4));
        code.push(Op::Return);

        // was_err: i := i + 1; jump loop_top
        let was_err_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_was_err] {
            *off = was_err_target - (j_was_err as i32 + 1);
        }
        code.push(Op::LoadLocal(3));
        code.push(Op::PushConst(one_const));
        code.push(Op::NumAdd);
        code.push(Op::StoreLocal(3));
        // Back-jump offset is relative to the pc *after* the Jump itself.
        let pc_after_jump = code.len() as i32 + 1;
        code.push(Op::Jump(loop_top - pc_after_jump));

        // done: return last (the final Err, or Unit if max=0).
        // NOTE(review): when max <= 0, local 4 is never written before this
        // read — the "Unit" claim relies on the VM zero-initializing locals
        // to Unit; confirm against the VM's frame setup.
        let done_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_done] {
            *off = done_target - (j_done as i32 + 1);
        }
        code.push(Op::LoadLocal(4));
        code.push(Op::Return);

        let fn_id = self.install_trampoline("__flow_retry", 3, 5, code);
        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
    }
2090
    /// `flow.retry_with_backoff(f, attempts, base_ms)` (#226). Variant
    /// of `flow.retry` that sleeps between attempts. The first
    /// attempt fires immediately; attempt k > 1 waits `base_ms *
    /// 2^(k-2)` ms before retrying. Sleeps go through
    /// `time.sleep_ms`, which is why the resulting closure carries
    /// `[time]` in its effect row even though the underlying `f` is
    /// pure.
    ///
    /// Jump offsets inside the trampoline are patched against indices
    /// into the local `code` vec, not `self.code`.
    fn emit_flow_retry_with_backoff(&mut self, args: &[a::CExpr]) {
        // Push captures: f, max, base_ms. The trampoline takes one
        // call-time arg `x`, so capture_count = 3, arity = 4.
        self.compile_expr(&args[0], false);
        self.compile_expr(&args[1], false);
        self.compile_expr(&args[2], false);
        let call_nid    = self.pool.node_id("n_flow_retry_backoff");
        let sleep_nid   = self.pool.node_id("n_flow_retry_backoff_sleep");
        let kind_idx    = self.pool.str("time");
        let op_idx      = self.pool.str("sleep_ms");
        let ok_idx      = self.pool.variant("Ok");
        let zero_const  = self.pool.int(0);
        let one_const   = self.pool.int(1);
        let two_const   = self.pool.int(2);
        // Locals layout:
        //   0=f, 1=max, 2=base_ms (captures)
        //   3=x (arg)
        //   4=i, 5=last, 6=next_delay (working state)
        let mut code = vec![
            // next_delay := base_ms
            Op::LoadLocal(2),
            Op::StoreLocal(6),
            // i := 0
            Op::PushConst(zero_const),
            Op::StoreLocal(4),
        ];

        let loop_top = code.len() as i32;
        // while i < max
        code.push(Op::LoadLocal(4));
        code.push(Op::LoadLocal(1));
        code.push(Op::NumLt);
        let j_done = code.len();
        code.push(Op::JumpIfNot(0)); // patched

        // if i > 0: time.sleep_ms(next_delay); next_delay := next_delay * 2
        // (i.e. the first attempt never sleeps; the delay doubles after
        // each sleep, giving base_ms * 2^(k-2) before attempt k)
        code.push(Op::PushConst(zero_const));
        code.push(Op::LoadLocal(4));
        code.push(Op::NumLt);                // 0 < i ?
        let j_no_sleep = code.len();
        code.push(Op::JumpIfNot(0));         // patched: skip the sleep block
        // Sleep
        code.push(Op::LoadLocal(6));         // arg = next_delay
        code.push(Op::EffectCall {
            kind_idx, op_idx, arity: 1, node_id_idx: sleep_nid,
        });
        code.push(Op::Pop);                  // discard the Unit result
        // next_delay := next_delay * 2
        code.push(Op::LoadLocal(6));
        code.push(Op::PushConst(two_const));
        code.push(Op::NumMul);
        code.push(Op::StoreLocal(6));
        // patch the no-sleep skip
        let after_sleep = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_no_sleep] {
            *off = after_sleep - (j_no_sleep as i32 + 1);
        }

        // last := f(x)
        code.push(Op::LoadLocal(0));
        code.push(Op::LoadLocal(3));
        code.push(Op::CallClosure { arity: 1, node_id_idx: call_nid });
        code.push(Op::StoreLocal(5));

        // if Ok(last): return last
        code.push(Op::LoadLocal(5));
        code.push(Op::TestVariant(ok_idx));
        let j_was_err = code.len();
        code.push(Op::JumpIfNot(0)); // patched
        code.push(Op::LoadLocal(5));
        code.push(Op::Return);

        // was_err: i := i + 1; jump loop_top
        let was_err_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_was_err] {
            *off = was_err_target - (j_was_err as i32 + 1);
        }
        code.push(Op::LoadLocal(4));
        code.push(Op::PushConst(one_const));
        code.push(Op::NumAdd);
        code.push(Op::StoreLocal(4));
        // Back-jump offset is relative to the pc *after* the Jump itself.
        let pc_after_jump = code.len() as i32 + 1;
        code.push(Op::Jump(loop_top - pc_after_jump));

        // done: return last (the final Err, or Unit if max=0).
        // NOTE(review): when max <= 0, local 5 is never written before this
        // read — the "Unit" claim relies on the VM zero-initializing locals
        // to Unit; confirm against the VM's frame setup.
        let done_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_done] {
            *off = done_target - (j_done as i32 + 1);
        }
        code.push(Op::LoadLocal(5));
        code.push(Op::Return);

        let fn_id = self.install_trampoline("__flow_retry_backoff", 4, 7, code);
        self.emit(Op::MakeClosure { fn_id, capture_count: 3 });
    }
2193}
2194
2195/// Collect free variables referenced in `e` that are not in `bound`.
2196/// Mutates `bound` to track let/lambda introductions during the walk;
2197/// the caller's set is preserved on return because Rust's borrow rules
2198/// force us to clone for sub-scopes that rebind a name.
2199fn free_vars(e: &a::CExpr, bound: &mut std::collections::HashSet<String>, out: &mut Vec<String>) {
2200    match e {
2201        a::CExpr::Literal { .. } => {}
2202        a::CExpr::Var { name } => {
2203            if !bound.contains(name) && !out.contains(name) {
2204                out.push(name.clone());
2205            }
2206        }
2207        a::CExpr::Call { callee, args } => {
2208            free_vars(callee, bound, out);
2209            for a in args { free_vars(a, bound, out); }
2210        }
2211        a::CExpr::Let { name, value, body, .. } => {
2212            free_vars(value, bound, out);
2213            let was_bound = bound.contains(name);
2214            bound.insert(name.clone());
2215            free_vars(body, bound, out);
2216            if !was_bound { bound.remove(name); }
2217        }
2218        a::CExpr::Match { scrutinee, arms } => {
2219            free_vars(scrutinee, bound, out);
2220            for arm in arms {
2221                let mut local_bound = bound.clone();
2222                pattern_binders(&arm.pattern, &mut local_bound);
2223                free_vars(&arm.body, &mut local_bound, out);
2224            }
2225        }
2226        a::CExpr::Block { statements, result } => {
2227            let mut local_bound = bound.clone();
2228            for s in statements { free_vars(s, &mut local_bound, out); }
2229            free_vars(result, &mut local_bound, out);
2230        }
2231        a::CExpr::Constructor { args, .. } => {
2232            for a in args { free_vars(a, bound, out); }
2233        }
2234        a::CExpr::RecordLit { fields } => {
2235            for f in fields { free_vars(&f.value, bound, out); }
2236        }
2237        a::CExpr::TupleLit { items } | a::CExpr::ListLit { items } => {
2238            for it in items { free_vars(it, bound, out); }
2239        }
2240        a::CExpr::FieldAccess { value, .. } => free_vars(value, bound, out),
2241        a::CExpr::Lambda { params, body, .. } => {
2242            let mut inner = bound.clone();
2243            for p in params { inner.insert(p.name.clone()); }
2244            free_vars(body, &mut inner, out);
2245        }
2246        a::CExpr::BinOp { lhs, rhs, .. } => {
2247            free_vars(lhs, bound, out);
2248            free_vars(rhs, bound, out);
2249        }
2250        a::CExpr::UnaryOp { expr, .. } => free_vars(expr, bound, out),
2251        a::CExpr::Return { value } => free_vars(value, bound, out),
2252    }
2253}
2254
2255fn pattern_binders(p: &a::Pattern, bound: &mut std::collections::HashSet<String>) {
2256    match p {
2257        a::Pattern::PWild | a::Pattern::PLiteral { .. } => {}
2258        a::Pattern::PVar { name } => { bound.insert(name.clone()); }
2259        a::Pattern::PConstructor { args, .. } => {
2260            for a in args { pattern_binders(a, bound); }
2261        }
2262        a::Pattern::PRecord { fields } => {
2263            for f in fields { pattern_binders(&f.pattern, bound); }
2264        }
2265        a::Pattern::PTuple { items } => {
2266            for it in items { pattern_binders(it, bound); }
2267        }
2268    }
2269}