// lex_bytecode/compiler.rs

1//! M4 compiler: canonical AST → bytecode.
2
3use crate::op::*;
4use crate::program::*;
5use indexmap::IndexMap;
6use lex_ast as a;
7
/// Interning constant pool. Each lookup method returns the pool index of an
/// existing equal constant where dedup is supported, appending otherwise.
#[derive(Default)]
struct ConstPool {
    // Flat constant table; becomes `Program::constants` at the end.
    pool: Vec<Const>,
    // Per-kind dedup indices: value → pool index.
    fields: IndexMap<String, u32>,
    variants: IndexMap<String, u32>,
    node_ids: IndexMap<String, u32>,
    ints: IndexMap<i64, u32>,
    // Bools are keyed as u8 (0/1) so the map key is Eq + Hash.
    bools: IndexMap<u8, u32>,
    strs: IndexMap<String, u32>,
}
18
19impl ConstPool {
20    fn field(&mut self, name: &str) -> u32 {
21        if let Some(i) = self.fields.get(name) { return *i; }
22        let i = self.pool.len() as u32;
23        self.pool.push(Const::FieldName(name.into()));
24        self.fields.insert(name.into(), i);
25        i
26    }
27    fn variant(&mut self, name: &str) -> u32 {
28        if let Some(i) = self.variants.get(name) { return *i; }
29        let i = self.pool.len() as u32;
30        self.pool.push(Const::VariantName(name.into()));
31        self.variants.insert(name.into(), i);
32        i
33    }
34    fn node_id(&mut self, name: &str) -> u32 {
35        if let Some(i) = self.node_ids.get(name) { return *i; }
36        let i = self.pool.len() as u32;
37        self.pool.push(Const::NodeId(name.into()));
38        self.node_ids.insert(name.into(), i);
39        i
40    }
41    fn int(&mut self, n: i64) -> u32 {
42        if let Some(i) = self.ints.get(&n) { return *i; }
43        let i = self.pool.len() as u32;
44        self.pool.push(Const::Int(n));
45        self.ints.insert(n, i);
46        i
47    }
48    fn bool(&mut self, b: bool) -> u32 {
49        let key = b as u8;
50        if let Some(i) = self.bools.get(&key) { return *i; }
51        let i = self.pool.len() as u32;
52        self.pool.push(Const::Bool(b));
53        self.bools.insert(key, i);
54        i
55    }
56    fn str(&mut self, s: &str) -> u32 {
57        if let Some(i) = self.strs.get(s) { return *i; }
58        let i = self.pool.len() as u32;
59        self.pool.push(Const::Str(s.into()));
60        self.strs.insert(s.into(), i);
61        i
62    }
63    fn float(&mut self, f: f64) -> u32 {
64        // Floats: not deduped (NaN issues).
65        let i = self.pool.len() as u32;
66        self.pool.push(Const::Float(f));
67        i
68    }
69    fn unit(&mut self) -> u32 {
70        let i = self.pool.len() as u32;
71        self.pool.push(Const::Unit);
72        i
73    }
74}
75
/// Compile a canonical AST (`stages`) into a bytecode `Program`.
///
/// Passes over `stages`:
/// 1. imports → module-alias table;
/// 2. fn declarations → function-table stubs, so calls (including forward
///    references) can resolve fn_ids before any body is compiled;
/// 3. fn bodies → bytecode, queueing discovered lambdas;
/// 4. drain the lambda queue (lambdas may queue further lambdas).
pub fn compile_program(stages: &[a::Stage]) -> Program {
    let mut p = Program {
        constants: Vec::new(),
        functions: Vec::new(),
        function_names: IndexMap::new(),
        module_aliases: IndexMap::new(),
        entry: None,
    };

    // Collect imports as alias → module-name. The module name is the part
    // after `std.` (so `import "std.io" as io` ⇒ alias `io` → module `io`).
    for s in stages {
        if let a::Stage::Import(i) = s {
            let module = i.reference.strip_prefix("std.").unwrap_or(&i.reference).to_string();
            p.module_aliases.insert(i.alias.clone(), module);
        }
    }

    // Pre-register every function (name, arity, declared effects) with an
    // empty body; bodies are filled in by the compilation pass below.
    for s in stages {
        if let a::Stage::FnDecl(fd) = s {
            let idx = p.functions.len() as u32;
            p.function_names.insert(fd.name.clone(), idx);
            p.functions.push(Function {
                name: fd.name.clone(),
                arity: fd.params.len() as u16,
                locals_count: 0,
                code: Vec::new(),
                effects: fd.effects.iter().map(|e| DeclaredEffect {
                    kind: e.name.clone(),
                    arg: e.arg.as_ref().map(|a| match a {
                        a::EffectArg::Str { value } => EffectArg::Str(value.clone()),
                        a::EffectArg::Int { value } => EffectArg::Int(*value),
                        a::EffectArg::Ident { value } => EffectArg::Ident(value.clone()),
                    }),
                }).collect(),
            });
        }
    }

    let mut pool = ConstPool::default();
    // Cloned so FnCompiler can borrow them immutably while `p.functions`
    // is simultaneously borrowed mutably (for lambda fn_id allocation).
    let function_names = p.function_names.clone();
    let module_aliases = p.module_aliases.clone();
    let mut pending_lambdas: Vec<PendingLambda> = Vec::new();

    for s in stages {
        if let a::Stage::FnDecl(_) = s {
            // Build a NodeId map for *this* stage so the compiler can stamp
            // each Call/EffectCall opcode with the originating AST node.
            let id_map = lex_ast::expr_ids(s);
            let fd = match s { a::Stage::FnDecl(fd) => fd, _ => unreachable!() };
            let mut fc = FnCompiler {
                code: Vec::new(),
                locals: IndexMap::new(),
                next_local: 0,
                peak_local: 0,
                pool: &mut pool,
                function_names: &function_names,
                module_aliases: &module_aliases,
                id_map: &id_map,
                pending_lambdas: &mut pending_lambdas,
                next_fn_id: &mut p.functions,
            };
            // Parameters occupy the first local slots, in declaration order.
            for param in &fd.params {
                let i = fc.next_local;
                fc.locals.insert(param.name.clone(), i);
                fc.next_local += 1;
                fc.peak_local = fc.next_local;
            }
            fc.compile_expr(&fd.body, true);
            fc.code.push(Op::Return);
            let code = std::mem::take(&mut fc.code);
            let peak = fc.peak_local;
            drop(fc);
            let idx = function_names[&fd.name];
            p.functions[idx as usize].code = code;
            p.functions[idx as usize].locals_count = peak;
        }
    }

    // Drain queued lambdas. `pop` takes them in LIFO order; order is
    // irrelevant for correctness since every lambda already owns its fn_id
    // slot. Each lambda body may queue further lambdas, hence the loop
    // rather than a single pass.
    while let Some(pl) = pending_lambdas.pop() {
        // Lambdas carry no per-stage NodeId map, so their Call/EffectCall
        // opcodes fall back to the "n_?" placeholder id.
        let id_map = std::collections::HashMap::new();
        let mut fc = FnCompiler {
            code: Vec::new(),
            locals: IndexMap::new(),
            next_local: 0,
            peak_local: 0,
            pool: &mut pool,
            function_names: &function_names,
            module_aliases: &module_aliases,
            id_map: &id_map,
            pending_lambdas: &mut pending_lambdas,
            next_fn_id: &mut p.functions,
        };
        // Captured values arrive as leading arguments, before the params
        // (matching the combined arity recorded at the MakeClosure site).
        for name in &pl.capture_names {
            let i = fc.next_local;
            fc.locals.insert(name.clone(), i);
            fc.next_local += 1;
            fc.peak_local = fc.next_local;
        }
        // NOTE: `p` here shadows the `Program` binding until the loop ends.
        for p in &pl.params {
            let i = fc.next_local;
            fc.locals.insert(p.name.clone(), i);
            fc.next_local += 1;
            fc.peak_local = fc.next_local;
        }
        fc.compile_expr(&pl.body, true);
        fc.code.push(Op::Return);
        let code = std::mem::take(&mut fc.code);
        let peak = fc.peak_local;
        drop(fc);
        p.functions[pl.fn_id as usize].code = code;
        p.functions[pl.fn_id as usize].locals_count = peak;
    }

    p.constants = pool.pool;
    p
}
195
/// A lambda discovered mid-compilation, queued for a later pass.
#[derive(Debug, Clone)]
struct PendingLambda {
    /// Pre-allocated slot in the function table (placeholder already pushed).
    fn_id: u32,
    /// Names of captured outer-scope locals, in order.
    capture_names: Vec<String>,
    /// Declared lambda parameters; bound after the captures.
    params: Vec<a::Param>,
    /// Lambda body, compiled with captures + params as the leading locals.
    body: a::CExpr,
}
204
/// Per-function compilation state: emits into `code` while tracking local
/// slot assignment and interning constants into the shared pool.
struct FnCompiler<'a> {
    /// Bytecode emitted so far for the current function.
    code: Vec<Op>,
    /// Name → local slot for everything currently in scope.
    locals: IndexMap<String, u16>,
    /// Next free local slot.
    next_local: u16,
    /// Peak local usage seen during compilation (for VM frame sizing).
    peak_local: u16,
    pool: &'a mut ConstPool,
    /// Top-level function name → fn_id (snapshot taken before compilation).
    function_names: &'a IndexMap<String, u32>,
    /// Import alias → module name.
    module_aliases: &'a IndexMap<String, String>,
    /// CExpr address → NodeId, populated per stage via `lex_ast::expr_ids`.
    id_map: &'a std::collections::HashMap<*const a::CExpr, lex_ast::NodeId>,
    /// Queue of lambdas discovered during compilation; each gets a fresh
    /// fn_id and is compiled in a later pass.
    pending_lambdas: &'a mut Vec<PendingLambda>,
    /// Mutable view of the function table — used to allocate fn_ids for
    /// freshly-discovered lambdas.
    next_fn_id: &'a mut Vec<Function>,
}
223
224impl<'a> FnCompiler<'a> {
225    fn alloc_local(&mut self, name: &str) -> u16 {
226        let i = self.next_local;
227        self.locals.insert(name.into(), i);
228        self.next_local += 1;
229        if self.next_local > self.peak_local { self.peak_local = self.next_local; }
230        i
231    }
232    fn emit(&mut self, op: Op) { self.code.push(op); }
233
    /// Emit bytecode for `e`, leaving exactly one value on the stack.
    /// `tail` marks tail position: calls there become `TailCall`, and
    /// let-bodies / block results / match arms inherit it.
    fn compile_expr(&mut self, e: &a::CExpr, tail: bool) {
        match e {
            a::CExpr::Literal { value } => self.compile_lit(value),
            a::CExpr::Var { name } => {
                // Locals shadow top-level function names here.
                if let Some(slot) = self.locals.get(name) {
                    self.emit(Op::LoadLocal(*slot));
                } else if let Some(&fn_id) = self.function_names.get(name) {
                    // Function name used as a *value* (e.g. as a record-field
                    // initializer or fold-callback arg) — materialize it as a
                    // closure with no captures. The runtime already accepts
                    // `Value::Closure { fn_id, captures: vec![] }` and
                    // `CallClosure` dispatches it. (#169)
                    self.emit(Op::MakeClosure { fn_id, capture_count: 0 });
                } else {
                    // Should be caught at type-check time; the type checker
                    // walks every Var. If we land here it's a compiler bug,
                    // not a user typo.
                    panic!("unknown var in compiler: {name}");
                }
            }
            a::CExpr::Let { name, ty: _, value, body } => {
                self.compile_expr(value, false);
                let slot = self.alloc_local(name);
                self.emit(Op::StoreLocal(slot));
                self.compile_expr(body, tail);
            }
            a::CExpr::Block { statements, result } => {
                // Intermediate statement values are discarded; only the
                // final expression's value survives.
                for s in statements {
                    self.compile_expr(s, false);
                    self.emit(Op::Pop);
                }
                self.compile_expr(result, tail);
            }
            a::CExpr::Call { callee, args } => self.compile_call(e, callee, args, tail),
            a::CExpr::Constructor { name, args } => {
                for a in args { self.compile_expr(a, false); }
                let name_idx = self.pool.variant(name);
                self.emit(Op::MakeVariant { name_idx, arity: args.len() as u16 });
            }
            a::CExpr::Match { scrutinee, arms } => self.compile_match(scrutinee, arms, tail),
            a::CExpr::RecordLit { fields } => {
                // Field values are pushed in declaration order; MakeRecord
                // pairs them with the interned field-name indices.
                let mut idxs = Vec::with_capacity(fields.len());
                for f in fields {
                    self.compile_expr(&f.value, false);
                    idxs.push(self.pool.field(&f.name));
                }
                self.emit(Op::MakeRecord { field_name_indices: idxs });
            }
            a::CExpr::TupleLit { items } => {
                for it in items { self.compile_expr(it, false); }
                self.emit(Op::MakeTuple(items.len() as u16));
            }
            a::CExpr::ListLit { items } => {
                for it in items { self.compile_expr(it, false); }
                self.emit(Op::MakeList(items.len() as u32));
            }
            a::CExpr::FieldAccess { value, field } => {
                self.compile_expr(value, false);
                let idx = self.pool.field(field);
                self.emit(Op::GetField(idx));
            }
            a::CExpr::BinOp { op, lhs, rhs } => self.compile_binop(op, lhs, rhs),
            a::CExpr::UnaryOp { op, expr } => {
                self.compile_expr(expr, false);
                match op.as_str() {
                    "-" => self.emit(Op::NumNeg),
                    "not" => self.emit(Op::BoolNot),
                    other => panic!("unknown unary: {other}"),
                }
            }
            a::CExpr::Lambda { params, body, .. } => self.compile_lambda(params, body),
            a::CExpr::Return { value } => {
                // An explicit `return` is always a tail position.
                self.compile_expr(value, true);
                self.emit(Op::Return);
            }
        }
    }
311
    /// Intern literal `l` into the constant pool and emit a `PushConst`.
    fn compile_lit(&mut self, l: &a::CLit) {
        let i = match l {
            a::CLit::Int { value } => self.pool.int(*value),
            a::CLit::Bool { value } => self.pool.bool(*value),
            a::CLit::Float { value } => {
                // Floats arrive as source text. NOTE(review): a malformed
                // literal silently becomes 0.0 — presumably the parser
                // guarantees well-formed floats; confirm upstream.
                let f: f64 = value.parse().unwrap_or(0.0);
                self.pool.float(f)
            }
            a::CLit::Str { value } => self.pool.str(value),
            a::CLit::Bytes { value: _ } => {
                // Stub: M4 doesn't use bytes literals in §3.13 examples.
                // The actual payload is dropped; an empty Bytes is pushed.
                let i = self.pool.pool.len() as u32;
                self.pool.pool.push(Const::Bytes(Vec::new()));
                i
            }
            a::CLit::Unit => self.pool.unit(),
        };
        self.emit(Op::PushConst(i));
    }
331
    /// Compile a call. `call_expr` is the whole `Call` node — used only to
    /// look up its NodeId for opcode stamping; `callee`/`args` are its parts.
    ///
    /// Dispatch order: module op (inline or EffectCall) → top-level function
    /// (Call/TailCall) → local closure value → arbitrary callee expression.
    fn compile_call(&mut self, call_expr: &a::CExpr, callee: &a::CExpr, args: &[a::CExpr], tail: bool) {
        // Resolve the originating AST node; "n_?" when the call was not in
        // the id map (e.g. inside a queued lambda, which has no map).
        let node_id = self
            .id_map
            .get(&(call_expr as *const a::CExpr))
            .map(|n| n.as_str().to_string())
            .unwrap_or_else(|| "n_?".into());
        let node_id_idx = self.pool.node_id(&node_id);

        // Module function call: `alias.op(args)` where `alias` is an imported
        // module ⇒ EffectCall, except for higher-order pure ops where we
        // emit inline bytecode using CallClosure (the closure-arg can't be
        // serialized through the effect handler).
        if let a::CExpr::FieldAccess { value, field } = callee {
            if let a::CExpr::Var { name } = value.as_ref() {
                if let Some(module) = self.module_aliases.get(name) {
                    if self.try_emit_higher_order(module, field, args, node_id_idx) {
                        let _ = tail; // inlined ops are never tail-call-eliminated
                        return;
                    }
                    for a in args { self.compile_expr(a, false); }
                    let kind_idx = self.pool.str(module);
                    let op_idx = self.pool.str(field);
                    self.emit(Op::EffectCall {
                        kind_idx,
                        op_idx,
                        arity: args.len() as u16,
                        node_id_idx,
                    });
                    let _ = tail; // effect calls are never tail-call-eliminated
                    return;
                }
            }
        }
        match callee {
            // Direct call to a top-level function; TailCall in tail position
            // enables constant-stack recursion. NOTE(review): this arm is
            // checked *before* locals, while compile_expr's Var case checks
            // locals first — a local shadowing a function name would resolve
            // differently in call vs value position; confirm the type
            // checker rejects such shadowing.
            a::CExpr::Var { name } if self.function_names.contains_key(name) => {
                for a in args { self.compile_expr(a, false); }
                let fn_id = self.function_names[name];
                if tail {
                    self.emit(Op::TailCall { fn_id, arity: args.len() as u16, node_id_idx });
                } else {
                    self.emit(Op::Call { fn_id, arity: args.len() as u16, node_id_idx });
                }
            }
            a::CExpr::Var { name } if self.locals.contains_key(name) => {
                // First-class function value bound to a local. Push the
                // closure, then args, then CallClosure.
                let slot = self.locals[name];
                self.emit(Op::LoadLocal(slot));
                for a in args { self.compile_expr(a, false); }
                self.emit(Op::CallClosure { arity: args.len() as u16, node_id_idx });
            }
            // Lambda directly applied — push closure + args + CallClosure.
            other => {
                self.compile_expr(other, false);
                for a in args { self.compile_expr(a, false); }
                self.emit(Op::CallClosure { arity: args.len() as u16, node_id_idx });
            }
        }
    }
391
392    fn compile_binop(&mut self, op: &str, lhs: &a::CExpr, rhs: &a::CExpr) {
393        self.compile_expr(lhs, false);
394        self.compile_expr(rhs, false);
395        match op {
396            "+" => self.emit(Op::NumAdd),
397            "-" => self.emit(Op::NumSub),
398            "*" => self.emit(Op::NumMul),
399            "/" => self.emit(Op::NumDiv),
400            "%" => self.emit(Op::NumMod),
401            "==" => self.emit(Op::NumEq),
402            "!=" => { self.emit(Op::NumEq); self.emit(Op::BoolNot); }
403            "<" => self.emit(Op::NumLt),
404            "<=" => self.emit(Op::NumLe),
405            ">" => { self.emit_swap_top2(); self.emit(Op::NumLt); }
406            ">=" => { self.emit_swap_top2(); self.emit(Op::NumLe); }
407            "and" => self.emit(Op::BoolAnd),
408            "or" => self.emit(Op::BoolOr),
409            other => panic!("unknown binop: {other:?}"),
410        }
411    }
412
413    fn emit_swap_top2(&mut self) {
414        let a = self.alloc_local("__swap_a");
415        let b = self.alloc_local("__swap_b");
416        self.emit(Op::StoreLocal(b));
417        self.emit(Op::StoreLocal(a));
418        self.emit(Op::LoadLocal(b));
419        self.emit(Op::LoadLocal(a));
420    }
421
    /// Compile `match scrutinee { arms }`.
    ///
    /// The scrutinee is parked in a scratch local and re-loaded at the top
    /// of each arm. Every arm emits: pattern test (producing `JumpIfNot(0)`
    /// holes), body, then a `Jump(0)` hole to the common end. Failure holes
    /// are patched to the start of the next arm; falling off the last arm
    /// hits a runtime "non-exhaustive match" panic.
    fn compile_match(&mut self, scrutinee: &a::CExpr, arms: &[a::Arm], tail: bool) {
        self.compile_expr(scrutinee, false);
        let scrut_slot = self.alloc_local("__scrut");
        self.emit(Op::StoreLocal(scrut_slot));

        let mut end_jumps: Vec<usize> = Vec::new();
        for arm in arms {
            // Snapshot locals so pattern bindings don't leak between arms.
            let arm_start_locals = self.next_local;
            let arm_start_locals_map = self.locals.clone();

            self.emit(Op::LoadLocal(scrut_slot));
            let mut bindings: Vec<(String, u16)> = Vec::new();
            let fail_jumps: Vec<usize> = self.compile_pattern_test(&arm.pattern, &mut bindings);

            self.compile_expr(&arm.body, tail);
            let j_end = self.code.len();
            self.emit(Op::Jump(0)); // hole: patched to the match end below
            end_jumps.push(j_end);

            // Patch this arm's failure holes to fall into the next arm.
            // Offsets are relative to the op *after* the jump site.
            let fail_target = self.code.len() as i32;
            for j in fail_jumps {
                if let Op::JumpIfNot(off) = &mut self.code[j] {
                    *off = fail_target - (j as i32 + 1);
                }
            }
            self.next_local = arm_start_locals;
            self.locals = arm_start_locals_map;
        }
        // No arm matched (exhaustiveness is the type checker's job).
        let panic_msg_idx = self.pool.str("non-exhaustive match");
        self.emit(Op::Panic(panic_msg_idx));

        let end_target = self.code.len() as i32;
        for j in end_jumps {
            if let Op::Jump(off) = &mut self.code[j] {
                *off = end_target - (j as i32 + 1);
            }
        }
    }
460
    /// Emit a test of pattern `p` against the value on top of the stack.
    /// The value is always consumed. Returns the indices of `JumpIfNot(0)`
    /// holes the caller must patch to the failure target; `PVar` bindings
    /// (name, slot) are appended to `bindings`.
    fn compile_pattern_test(&mut self, p: &a::Pattern, bindings: &mut Vec<(String, u16)>) -> Vec<usize> {
        let mut fails = Vec::new();
        match p {
            // Wildcard: discard the value; always matches.
            a::Pattern::PWild => { self.emit(Op::Pop); }
            // Variable: bind the value to a fresh local; always matches.
            a::Pattern::PVar { name } => {
                let slot = self.alloc_local(name);
                self.emit(Op::StoreLocal(slot));
                bindings.push((name.clone(), slot));
            }
            // Literal: push the constant, compare with the type's Eq op.
            a::Pattern::PLiteral { value } => {
                self.compile_lit(value);
                match value {
                    a::CLit::Str { .. } => self.emit(Op::StrEq),
                    a::CLit::Bytes { .. } => self.emit(Op::BytesEq),
                    _ => self.emit(Op::NumEq),
                }
                let j = self.code.len();
                self.emit(Op::JumpIfNot(0));
                fails.push(j);
            }
            a::Pattern::PConstructor { name, args } => {
                let name_idx = self.pool.variant(name);
                // Dup so TestVariant consumes a copy; the original stays on
                // the stack for payload extraction below.
                self.emit(Op::Dup);
                self.emit(Op::TestVariant(name_idx));
                let j = self.code.len();
                self.emit(Op::JumpIfNot(0));
                fails.push(j);
                if args.is_empty() {
                    self.emit(Op::Pop);
                } else if args.len() == 1 {
                    // Single payload: extract in place and recurse.
                    self.emit(Op::GetVariantArg(0));
                    let sub_fails = self.compile_pattern_test(&args[0], bindings);
                    fails.extend(sub_fails);
                } else {
                    // Multiple payloads: park the variant in a scratch local
                    // and re-load it for each sub-pattern.
                    let slot = self.alloc_local("__variant");
                    self.emit(Op::StoreLocal(slot));
                    for (i, arg) in args.iter().enumerate() {
                        self.emit(Op::LoadLocal(slot));
                        self.emit(Op::GetVariantArg(i as u16));
                        let sub_fails = self.compile_pattern_test(arg, bindings);
                        fails.extend(sub_fails);
                    }
                }
            }
            // Record pattern: test each listed field; fields not mentioned
            // in the pattern are left unchecked.
            a::Pattern::PRecord { fields } => {
                let slot = self.alloc_local("__record");
                self.emit(Op::StoreLocal(slot));
                for f in fields {
                    self.emit(Op::LoadLocal(slot));
                    let fi = self.pool.field(&f.name);
                    self.emit(Op::GetField(fi));
                    let sub_fails = self.compile_pattern_test(&f.pattern, bindings);
                    fails.extend(sub_fails);
                }
            }
            a::Pattern::PTuple { items } => {
                let slot = self.alloc_local("__tuple");
                self.emit(Op::StoreLocal(slot));
                for (i, item) in items.iter().enumerate() {
                    self.emit(Op::LoadLocal(slot));
                    self.emit(Op::GetElem(i as u16));
                    let sub_fails = self.compile_pattern_test(item, bindings);
                    fails.extend(sub_fails);
                }
            }
        }
        fails
    }
529
    /// Compile a Lambda: collect free variables that resolve to outer-scope
    /// locals, register a synthetic function, emit MakeClosure with the
    /// captured values pushed in order.
    fn compile_lambda(&mut self, params: &[a::Param], body: &a::CExpr) {
        // Free vars = vars referenced in body that aren't bound locally.
        let mut bound: std::collections::HashSet<String> = params.iter().map(|p| p.name.clone()).collect();
        let mut frees: Vec<String> = Vec::new();
        free_vars(body, &mut bound, &mut frees);

        // Filter to those that are in the enclosing locals (captures) —
        // skip globals (function names) which are referenced directly.
        // NOTE(review): a local that shadows a top-level function name is
        // excluded here too, so the lambda body would resolve it to the
        // global rather than the shadowing local — confirm the type checker
        // rejects such shadowing.
        let captures: Vec<String> = frees.into_iter()
            .filter(|n| self.locals.contains_key(n) && !self.function_names.contains_key(n))
            .collect();

        // Allocate a fresh fn_id by appending a placeholder Function.
        // Captures are passed as leading arguments, hence the combined arity.
        let fn_id = self.next_fn_id.len() as u32;
        self.next_fn_id.push(Function {
            name: format!("__lambda_{fn_id}"),
            arity: (captures.len() + params.len()) as u16,
            locals_count: 0,
            code: Vec::new(),
            effects: Vec::new(),
        });

        // Emit code at the lambda site: load each captured local, then MakeClosure.
        for c in &captures {
            let slot = *self.locals.get(c).expect("free var must be in scope");
            self.emit(Op::LoadLocal(slot));
        }
        self.emit(Op::MakeClosure { fn_id, capture_count: captures.len() as u16 });

        // Queue the body for later compilation (see the drain loop in
        // `compile_program`).
        self.pending_lambdas.push(PendingLambda {
            fn_id,
            capture_names: captures,
            params: params.to_vec(),
            body: body.clone(),
        });
    }
570
571    /// Higher-order stdlib ops on Result/Option whose function arg is a
572    /// closure. Emit inline: pattern-match on the variant, invoke the
573    /// closure when applicable, return wrapped result.
574    fn try_emit_higher_order(
575        &mut self,
576        module: &str,
577        op: &str,
578        args: &[a::CExpr],
579        node_id_idx: u32,
580    ) -> bool {
581        match (module, op) {
582            ("result", "map") => self.emit_variant_map(args, "Ok", true),
583            ("result", "and_then") => self.emit_variant_map(args, "Ok", false),
584            ("result", "map_err") => self.emit_variant_map(args, "Err", true),
585            ("option", "map") => self.emit_variant_map(args, "Some", true),
586            ("option", "and_then") => self.emit_variant_map(args, "Some", false),
587            ("list", "map") => self.emit_list_map(args),
588            ("list", "filter") => self.emit_list_filter(args),
589            ("list", "fold") => self.emit_list_fold(args),
590            ("map", "fold") => self.emit_map_fold(args, node_id_idx),
591            ("flow", "sequential") => self.emit_flow_sequential(args),
592            ("flow", "branch") => self.emit_flow_branch(args),
593            ("flow", "retry") => self.emit_flow_retry(args),
594            ("flow", "parallel") => self.emit_flow_parallel(args),
595            ("flow", "parallel_list") => self.emit_flow_parallel_list(args),
596            _ => return false,
597        }
598        true
599    }
600
    /// `list.map(xs, f)` — inline loop applying `f` to each element.
    /// Stack contract: pushes the resulting List.
    /// Jump offsets are relative to the op *after* the jump site, hence
    /// the `target - (site + 1)` patching arithmetic.
    fn emit_list_map(&mut self, args: &[a::CExpr]) {
        // Compile xs and f, store both as locals.
        self.compile_expr(&args[0], false);
        let xs = self.alloc_local("__lm_xs");
        self.emit(Op::StoreLocal(xs));
        self.compile_expr(&args[1], false);
        let f = self.alloc_local("__lm_f");
        self.emit(Op::StoreLocal(f));

        // out := []
        self.emit(Op::MakeList(0));
        let out = self.alloc_local("__lm_out");
        self.emit(Op::StoreLocal(out));

        // i := 0
        let zero = self.pool.int(0);
        self.emit(Op::PushConst(zero));
        let i = self.alloc_local("__lm_i");
        self.emit(Op::StoreLocal(i));

        // loop_top: while i < len(xs) { ... }
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0)); // hole: patched to the loop exit below

        // body: out := out ++ [f(xs[i])]
        // Synthetic NodeId — the inline loop has no originating AST call node.
        let nid = self.pool.node_id("n_list_map");
        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // jump back
        let jump_back = self.code.len();
        let back = (loop_top as i32) - (jump_back as i32 + 1);
        self.emit(Op::Jump(back));

        // exit: patch j_exit, push out
        let exit_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_target - (j_exit as i32 + 1);
        }
        self.emit(Op::LoadLocal(out));
    }
662
    /// `list.filter(xs, pred)` — keep elements where pred returns true.
    /// Stack contract: pushes the resulting (possibly shorter) List.
    fn emit_list_filter(&mut self, args: &[a::CExpr]) {
        // Compile xs and pred, store both as locals.
        self.compile_expr(&args[0], false);
        let xs = self.alloc_local("__lf_xs");
        self.emit(Op::StoreLocal(xs));
        self.compile_expr(&args[1], false);
        let f = self.alloc_local("__lf_f");
        self.emit(Op::StoreLocal(f));

        // out := []
        self.emit(Op::MakeList(0));
        let out = self.alloc_local("__lf_out");
        self.emit(Op::StoreLocal(out));

        // i := 0
        let zero = self.pool.int(0);
        self.emit(Op::PushConst(zero));
        let i = self.alloc_local("__lf_i");
        self.emit(Op::StoreLocal(i));

        // loop_top: while i < len(xs) { ... }
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0)); // hole: patched to the loop exit below

        // x := xs[i]
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        let x = self.alloc_local("__lf_x");
        self.emit(Op::StoreLocal(x));

        // if pred(x) { out := out ++ [x] }
        let nid = self.pool.node_id("n_list_filter");
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(x));
        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
        let j_skip = self.code.len();
        self.emit(Op::JumpIfNot(0)); // hole: skips the append when pred is false

        self.emit(Op::LoadLocal(out));
        self.emit(Op::LoadLocal(x));
        self.emit(Op::ListAppend);
        self.emit(Op::StoreLocal(out));

        // Patch the skip hole; offsets are relative to the next op.
        let skip_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
            *off = skip_target - (j_skip as i32 + 1);
        }

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // jump back to the loop head
        let jump_back = self.code.len();
        let back = (loop_top as i32) - (jump_back as i32 + 1);
        self.emit(Op::Jump(back));

        // exit: patch j_exit, push the filtered list as the result
        let exit_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_target - (j_exit as i32 + 1);
        }
        self.emit(Op::LoadLocal(out));
    }
731
    /// `list.fold(xs, init, f)` — left fold with two-arg combiner.
    /// Stack contract: pushes the final accumulator value.
    fn emit_list_fold(&mut self, args: &[a::CExpr]) {
        // args: xs, init, f
        self.compile_expr(&args[0], false);
        let xs = self.alloc_local("__lo_xs");
        self.emit(Op::StoreLocal(xs));
        self.compile_expr(&args[1], false);
        let acc = self.alloc_local("__lo_acc");
        self.emit(Op::StoreLocal(acc));
        self.compile_expr(&args[2], false);
        let f = self.alloc_local("__lo_f");
        self.emit(Op::StoreLocal(f));

        // i := 0
        let zero = self.pool.int(0);
        self.emit(Op::PushConst(zero));
        let i = self.alloc_local("__lo_i");
        self.emit(Op::StoreLocal(i));

        // loop_top: while i < len(xs) { ... }
        let loop_top = self.code.len();
        self.emit(Op::LoadLocal(i));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::GetListLen);
        self.emit(Op::NumLt);
        let j_exit = self.code.len();
        self.emit(Op::JumpIfNot(0)); // hole: patched to the loop exit below

        // acc := f(acc, xs[i])
        let nid = self.pool.node_id("n_list_fold");
        self.emit(Op::LoadLocal(f));
        self.emit(Op::LoadLocal(acc));
        self.emit(Op::LoadLocal(xs));
        self.emit(Op::LoadLocal(i));
        self.emit(Op::GetListElemDyn);
        self.emit(Op::CallClosure { arity: 2, node_id_idx: nid });
        self.emit(Op::StoreLocal(acc));

        // i := i + 1
        self.emit(Op::LoadLocal(i));
        let one = self.pool.int(1);
        self.emit(Op::PushConst(one));
        self.emit(Op::NumAdd);
        self.emit(Op::StoreLocal(i));

        // jump back to the loop head
        let jump_back = self.code.len();
        let back = (loop_top as i32) - (jump_back as i32 + 1);
        self.emit(Op::Jump(back));

        // exit: patch j_exit, push the accumulator as the result
        let exit_target = self.code.len() as i32;
        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
            *off = exit_target - (j_exit as i32 + 1);
        }
        self.emit(Op::LoadLocal(acc));
    }
785
786    /// `map.fold(m, init, f)` — left fold over `Map[K, V]` entries with a
787    /// three-arg combiner `f(acc, k, v)`. Iteration order matches
788    /// `map.entries` (BTreeMap-sorted by key). Materializes the entry
789    /// list once via the runtime's `("map", "entries")` op, then runs
790    /// the same inline loop as `list.fold`.
791    fn emit_map_fold(&mut self, args: &[a::CExpr], node_id_idx: u32) {
792        // xs := map.entries(m)
793        self.compile_expr(&args[0], false);
794        let map_kind = self.pool.str("map");
795        let entries_op = self.pool.str("entries");
796        self.emit(Op::EffectCall {
797            kind_idx: map_kind,
798            op_idx: entries_op,
799            arity: 1,
800            node_id_idx,
801        });
802        let xs = self.alloc_local("__mf_xs");
803        self.emit(Op::StoreLocal(xs));
804
805        // acc := init
806        self.compile_expr(&args[1], false);
807        let acc = self.alloc_local("__mf_acc");
808        self.emit(Op::StoreLocal(acc));
809
810        // f := <closure>
811        self.compile_expr(&args[2], false);
812        let f = self.alloc_local("__mf_f");
813        self.emit(Op::StoreLocal(f));
814
815        // i := 0
816        let zero = self.pool.int(0);
817        self.emit(Op::PushConst(zero));
818        let i = self.alloc_local("__mf_i");
819        self.emit(Op::StoreLocal(i));
820
821        // loop_top: while i < len(xs)
822        let loop_top = self.code.len();
823        self.emit(Op::LoadLocal(i));
824        self.emit(Op::LoadLocal(xs));
825        self.emit(Op::GetListLen);
826        self.emit(Op::NumLt);
827        let j_exit = self.code.len();
828        self.emit(Op::JumpIfNot(0));
829
830        // pair := xs[i]
831        self.emit(Op::LoadLocal(xs));
832        self.emit(Op::LoadLocal(i));
833        self.emit(Op::GetListElemDyn);
834        let pair = self.alloc_local("__mf_pair");
835        self.emit(Op::StoreLocal(pair));
836
837        // acc := f(acc, pair.0, pair.1)
838        let nid = self.pool.node_id("n_map_fold");
839        self.emit(Op::LoadLocal(f));
840        self.emit(Op::LoadLocal(acc));
841        self.emit(Op::LoadLocal(pair));
842        self.emit(Op::GetElem(0));
843        self.emit(Op::LoadLocal(pair));
844        self.emit(Op::GetElem(1));
845        self.emit(Op::CallClosure { arity: 3, node_id_idx: nid });
846        self.emit(Op::StoreLocal(acc));
847
848        // i := i + 1
849        self.emit(Op::LoadLocal(i));
850        let one = self.pool.int(1);
851        self.emit(Op::PushConst(one));
852        self.emit(Op::NumAdd);
853        self.emit(Op::StoreLocal(i));
854
855        let jump_back = self.code.len();
856        let back = (loop_top as i32) - (jump_back as i32 + 1);
857        self.emit(Op::Jump(back));
858
859        let exit_target = self.code.len() as i32;
860        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
861            *off = exit_target - (j_exit as i32 + 1);
862        }
863        self.emit(Op::LoadLocal(acc));
864    }
865
866    /// Inline pattern: `<module>.map(v, f)` and friends.
867    /// `wrap_with`: variant tag whose payload triggers the call (Ok / Some / Err).
868    /// `wrap_result`: if true, wrap the closure's result back in `wrap_with`
869    /// (map shape); if false, expect the closure to return a wrapped value
870    /// itself (and_then shape).
871    fn emit_variant_map(
872        &mut self,
873        args: &[a::CExpr],
874        wrap_with: &str,
875        wrap_result: bool,
876    ) {
877        // args[0] = the wrapped value (Result/Option), args[1] = closure
878        let wrap_idx = self.pool.variant(wrap_with);
879
880        // Compile and store the value into a local, evaluate closure on top of stack.
881        self.compile_expr(&args[0], false);
882        let val_slot = self.alloc_local("__hov");
883        self.emit(Op::StoreLocal(val_slot));
884
885        self.compile_expr(&args[1], false);
886        let f_slot = self.alloc_local("__hof");
887        self.emit(Op::StoreLocal(f_slot));
888
889        // Stack discipline:
890        //   load val ⇒ [v]
891        //   dup     ⇒ [v, v]
892        //   test    ⇒ [v, Bool]
893        //   jumpifnot ⇒ [v]
894        // Both branches end with [v] before the branch body.
895        self.emit(Op::LoadLocal(val_slot));
896        self.emit(Op::Dup);
897        self.emit(Op::TestVariant(wrap_idx));
898        let j_skip = self.code.len();
899        self.emit(Op::JumpIfNot(0));
900
901        // Matched arm: extract payload, call closure on it.
902        self.emit(Op::GetVariantArg(0));
903        let arg_slot = self.alloc_local("__hov_arg");
904        self.emit(Op::StoreLocal(arg_slot));
905        self.emit(Op::LoadLocal(f_slot));
906        self.emit(Op::LoadLocal(arg_slot));
907        let nid = self.pool.node_id("n_hov");
908        self.emit(Op::CallClosure { arity: 1, node_id_idx: nid });
909        if wrap_result {
910            self.emit(Op::MakeVariant { name_idx: wrap_idx, arity: 1 });
911        }
912        let j_end = self.code.len();
913        self.emit(Op::Jump(0));
914
915        // Skip arm: stack already has [v] from the failed Dup; nothing to do.
916        let skip_target = self.code.len() as i32;
917        if let Op::JumpIfNot(off) = &mut self.code[j_skip] {
918            *off = skip_target - (j_skip as i32 + 1);
919        }
920
921        let end_target = self.code.len() as i32;
922        if let Op::Jump(off) = &mut self.code[j_end] {
923            *off = end_target - (j_end as i32 + 1);
924        }
925    }
926
927    // ---- std.flow trampolines ----------------------------------------
928    //
929    // Each flow.<op>(c1, c2, ...) call site:
930    //   1. compiles its closure args and leaves them on the stack
931    //   2. registers a fresh "trampoline" Function whose body invokes
932    //      those captured closures appropriately
933    //   3. emits MakeClosure { fn_id: trampoline, capture_count: N }
934    //
935    // The trampoline's parameter layout is [capture_0, ..., capture_{N-1},
936    // arg_0, ...]: captures first, the closure's own args after.
937
938    /// Allocate a fresh fn_id for a trampoline and install its bytecode.
939    fn install_trampoline(&mut self, name: &str, arity: u16, locals_count: u16, code: Vec<Op>) -> u32 {
940        let fn_id = self.next_fn_id.len() as u32;
941        self.next_fn_id.push(Function {
942            name: name.into(),
943            arity,
944            locals_count,
945            code,
946            effects: Vec::new(),
947        });
948        fn_id
949    }
950
951    /// `flow.sequential(f, g)` returns a closure `(x) -> g(f(x))`.
952    fn emit_flow_sequential(&mut self, args: &[a::CExpr]) {
953        // Push f, g; build the trampoline closure with 2 captures.
954        self.compile_expr(&args[0], false);
955        self.compile_expr(&args[1], false);
956        let nid = self.pool.node_id("n_flow_sequential");
957        let code = vec![
958            // Locals: [f=0, g=1, x=2]
959            Op::LoadLocal(0),                                  // push f
960            Op::LoadLocal(2),                                  // push x
961            Op::CallClosure { arity: 1, node_id_idx: nid },    // r = f(x)
962            // stack: [r]
963            Op::StoreLocal(3),                                 // tmp = r
964            Op::LoadLocal(1),                                  // push g
965            Op::LoadLocal(3),                                  // push tmp
966            Op::CallClosure { arity: 1, node_id_idx: nid },    // r = g(tmp)
967            Op::Return,
968        ];
969        let fn_id = self.install_trampoline("__flow_sequential", 3, 4, code);
970        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
971    }
972
973    /// `flow.parallel(fa, fb)` returns a closure `() -> (fa(), fb())`.
974    /// Implementation is sequential: each function is called in order
975    /// and the results are packed into a 2-tuple. The spec (§11.2)
976    /// allows the runtime to apply true parallelism here; that needs
977    /// a thread-safe handler split and is left to a follow-up. The
978    /// signature is what users program against — sequential vs threaded
979    /// is an implementation detail invisible to the type system.
980    fn emit_flow_parallel(&mut self, args: &[a::CExpr]) {
981        // Push fa, fb; build a 0-arg trampoline closure with 2 captures.
982        self.compile_expr(&args[0], false);
983        self.compile_expr(&args[1], false);
984        let nid = self.pool.node_id("n_flow_parallel");
985        let code = vec![
986            // Locals: [fa=0, fb=1]
987            Op::LoadLocal(0),                                  // push fa
988            Op::CallClosure { arity: 0, node_id_idx: nid },    // a = fa()
989            Op::LoadLocal(1),                                  // push fb
990            Op::CallClosure { arity: 0, node_id_idx: nid },    // b = fb()
991            Op::MakeTuple(2),                                  // (a, b)
992            Op::Return,
993        ];
994        let fn_id = self.install_trampoline("__flow_parallel", 2, 2, code);
995        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
996    }
997
998    /// `flow.parallel_list(actions)` runs each 0-arg closure in `actions`
999    /// and returns the results as a list in input order. Variadic
1000    /// counterpart to `flow.parallel`. Sequential under the hood — the
1001    /// spec (§11.2) reserves true threading for a future scheduler.
1002    /// Compiled inline (mirrors `list.map`) so closure args can flow
1003    /// through `CallClosure` without a heap-allocated trampoline.
1004    fn emit_flow_parallel_list(&mut self, args: &[a::CExpr]) {
1005        // xs := actions
1006        self.compile_expr(&args[0], false);
1007        let xs = self.alloc_local("__fpl_xs");
1008        self.emit(Op::StoreLocal(xs));
1009
1010        // out := []
1011        self.emit(Op::MakeList(0));
1012        let out = self.alloc_local("__fpl_out");
1013        self.emit(Op::StoreLocal(out));
1014
1015        // i := 0
1016        let zero = self.pool.int(0);
1017        self.emit(Op::PushConst(zero));
1018        let i = self.alloc_local("__fpl_i");
1019        self.emit(Op::StoreLocal(i));
1020
1021        // loop_top: while i < len(xs) { ... }
1022        let loop_top = self.code.len();
1023        self.emit(Op::LoadLocal(i));
1024        self.emit(Op::LoadLocal(xs));
1025        self.emit(Op::GetListLen);
1026        self.emit(Op::NumLt);
1027        let j_exit = self.code.len();
1028        self.emit(Op::JumpIfNot(0));
1029
1030        // body: out := out ++ [xs[i]()]
1031        let nid = self.pool.node_id("n_flow_parallel_list");
1032        self.emit(Op::LoadLocal(out));
1033        self.emit(Op::LoadLocal(xs));
1034        self.emit(Op::LoadLocal(i));
1035        self.emit(Op::GetListElemDyn);
1036        self.emit(Op::CallClosure { arity: 0, node_id_idx: nid });
1037        self.emit(Op::ListAppend);
1038        self.emit(Op::StoreLocal(out));
1039
1040        // i := i + 1
1041        self.emit(Op::LoadLocal(i));
1042        let one = self.pool.int(1);
1043        self.emit(Op::PushConst(one));
1044        self.emit(Op::NumAdd);
1045        self.emit(Op::StoreLocal(i));
1046
1047        // jump back
1048        let jump_back = self.code.len();
1049        let back = (loop_top as i32) - (jump_back as i32 + 1);
1050        self.emit(Op::Jump(back));
1051
1052        // exit: patch j_exit, push out
1053        let exit_target = self.code.len() as i32;
1054        if let Op::JumpIfNot(off) = &mut self.code[j_exit] {
1055            *off = exit_target - (j_exit as i32 + 1);
1056        }
1057        self.emit(Op::LoadLocal(out));
1058    }
1059
1060    /// `flow.branch(cond, t, f)` returns a closure `(x) -> if cond(x) then t(x) else f(x)`.
1061    fn emit_flow_branch(&mut self, args: &[a::CExpr]) {
1062        self.compile_expr(&args[0], false);
1063        self.compile_expr(&args[1], false);
1064        self.compile_expr(&args[2], false);
1065        let nid = self.pool.node_id("n_flow_branch");
1066        let mut code = vec![
1067            // Locals: [cond=0, t=1, f=2, x=3]
1068            Op::LoadLocal(0),                               // push cond
1069            Op::LoadLocal(3),                               // push x
1070            Op::CallClosure { arity: 1, node_id_idx: nid }, // bool
1071        ];
1072        let j_false = code.len();
1073        code.push(Op::JumpIfNot(0));                        // patched
1074        // true arm: t(x)
1075        code.push(Op::LoadLocal(1));
1076        code.push(Op::LoadLocal(3));
1077        code.push(Op::CallClosure { arity: 1, node_id_idx: nid });
1078        code.push(Op::Return);
1079        // false arm
1080        let false_target = code.len() as i32;
1081        if let Op::JumpIfNot(off) = &mut code[j_false] {
1082            *off = false_target - (j_false as i32 + 1);
1083        }
1084        code.push(Op::LoadLocal(2));
1085        code.push(Op::LoadLocal(3));
1086        code.push(Op::CallClosure { arity: 1, node_id_idx: nid });
1087        code.push(Op::Return);
1088
1089        let fn_id = self.install_trampoline("__flow_branch", 4, 4, code);
1090        self.emit(Op::MakeClosure { fn_id, capture_count: 3 });
1091    }
1092
    /// `flow.retry(f, max_attempts)` returns a closure `(x) -> Result[U, E]`
    /// that calls `f(x)` up to `max_attempts` times, returning the first
    /// `Ok` or the final `Err`.
    ///
    /// The whole retry loop lives inside a trampoline function; only the
    /// two captures (f and max_attempts) are compiled at the call site.
    fn emit_flow_retry(&mut self, args: &[a::CExpr]) {
        // Push the captures: f, then max_attempts.
        self.compile_expr(&args[0], false);
        self.compile_expr(&args[1], false);
        let call_nid = self.pool.node_id("n_flow_retry");
        let ok_idx = self.pool.variant("Ok");
        let zero_const = self.pool.int(0);
        let one_const = self.pool.int(1);
        // Locals: [f=0, max=1, x=2, i=3, last=4]
        let mut code = vec![
            // i := 0
            Op::PushConst(zero_const),
            Op::StoreLocal(3),
        ];
        // loop_top: while i < max
        let loop_top = code.len() as i32;
        code.push(Op::LoadLocal(3));
        code.push(Op::LoadLocal(1));
        code.push(Op::NumLt);
        let j_done = code.len();
        code.push(Op::JumpIfNot(0));                       // patched

        // body: r := f(x); last := r
        code.push(Op::LoadLocal(0));
        code.push(Op::LoadLocal(2));
        code.push(Op::CallClosure { arity: 1, node_id_idx: call_nid });
        code.push(Op::StoreLocal(4));

        // Test variant Ok on last; if so, return last.
        code.push(Op::LoadLocal(4));
        code.push(Op::TestVariant(ok_idx));
        let j_was_err = code.len();
        code.push(Op::JumpIfNot(0));                       // patched: skip return
        code.push(Op::LoadLocal(4));
        code.push(Op::Return);

        // was_err: i := i + 1; jump loop_top
        let was_err_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_was_err] {
            *off = was_err_target - (j_was_err as i32 + 1);
        }
        code.push(Op::LoadLocal(3));
        code.push(Op::PushConst(one_const));
        code.push(Op::NumAdd);
        code.push(Op::StoreLocal(3));
        // Jump offsets are relative to the pc *after* the jump instruction.
        let pc_after_jump = code.len() as i32 + 1;
        code.push(Op::Jump(loop_top - pc_after_jump));

        // done: return last (the final Err, or Unit if max=0).
        // NOTE(review): when max_attempts <= 0 the loop never runs and
        // local 4 is read before any write — presumably the VM
        // zero-initializes locals to Unit, as the line above implies;
        // confirm against the runtime's local-slot initialization.
        let done_target = code.len() as i32;
        if let Op::JumpIfNot(off) = &mut code[j_done] {
            *off = done_target - (j_done as i32 + 1);
        }
        code.push(Op::LoadLocal(4));
        code.push(Op::Return);

        let fn_id = self.install_trampoline("__flow_retry", 3, 5, code);
        self.emit(Op::MakeClosure { fn_id, capture_count: 2 });
    }
1154}
1155
1156/// Collect free variables referenced in `e` that are not in `bound`.
1157/// Mutates `bound` to track let/lambda introductions during the walk;
1158/// the caller's set is preserved on return because Rust's borrow rules
1159/// force us to clone for sub-scopes that rebind a name.
1160fn free_vars(e: &a::CExpr, bound: &mut std::collections::HashSet<String>, out: &mut Vec<String>) {
1161    match e {
1162        a::CExpr::Literal { .. } => {}
1163        a::CExpr::Var { name } => {
1164            if !bound.contains(name) && !out.contains(name) {
1165                out.push(name.clone());
1166            }
1167        }
1168        a::CExpr::Call { callee, args } => {
1169            free_vars(callee, bound, out);
1170            for a in args { free_vars(a, bound, out); }
1171        }
1172        a::CExpr::Let { name, value, body, .. } => {
1173            free_vars(value, bound, out);
1174            let was_bound = bound.contains(name);
1175            bound.insert(name.clone());
1176            free_vars(body, bound, out);
1177            if !was_bound { bound.remove(name); }
1178        }
1179        a::CExpr::Match { scrutinee, arms } => {
1180            free_vars(scrutinee, bound, out);
1181            for arm in arms {
1182                let mut local_bound = bound.clone();
1183                pattern_binders(&arm.pattern, &mut local_bound);
1184                free_vars(&arm.body, &mut local_bound, out);
1185            }
1186        }
1187        a::CExpr::Block { statements, result } => {
1188            let mut local_bound = bound.clone();
1189            for s in statements { free_vars(s, &mut local_bound, out); }
1190            free_vars(result, &mut local_bound, out);
1191        }
1192        a::CExpr::Constructor { args, .. } => {
1193            for a in args { free_vars(a, bound, out); }
1194        }
1195        a::CExpr::RecordLit { fields } => {
1196            for f in fields { free_vars(&f.value, bound, out); }
1197        }
1198        a::CExpr::TupleLit { items } | a::CExpr::ListLit { items } => {
1199            for it in items { free_vars(it, bound, out); }
1200        }
1201        a::CExpr::FieldAccess { value, .. } => free_vars(value, bound, out),
1202        a::CExpr::Lambda { params, body, .. } => {
1203            let mut inner = bound.clone();
1204            for p in params { inner.insert(p.name.clone()); }
1205            free_vars(body, &mut inner, out);
1206        }
1207        a::CExpr::BinOp { lhs, rhs, .. } => {
1208            free_vars(lhs, bound, out);
1209            free_vars(rhs, bound, out);
1210        }
1211        a::CExpr::UnaryOp { expr, .. } => free_vars(expr, bound, out),
1212        a::CExpr::Return { value } => free_vars(value, bound, out),
1213    }
1214}
1215
1216fn pattern_binders(p: &a::Pattern, bound: &mut std::collections::HashSet<String>) {
1217    match p {
1218        a::Pattern::PWild | a::Pattern::PLiteral { .. } => {}
1219        a::Pattern::PVar { name } => { bound.insert(name.clone()); }
1220        a::Pattern::PConstructor { args, .. } => {
1221            for a in args { pattern_binders(a, bound); }
1222        }
1223        a::Pattern::PRecord { fields } => {
1224            for f in fields { pattern_binders(&f.pattern, bound); }
1225        }
1226        a::Pattern::PTuple { items } => {
1227            for it in items { pattern_binders(it, bound); }
1228        }
1229    }
1230}