// kore/codegen/wasm.rs
1//! WASM Code Generation using walrus
2//! 
3//! This module converts the Typed AST into WebAssembly.
4
5use crate::ast::{Expr, BinaryOp, Stmt, Block};
6use crate::types::{ResolvedType, TypedFunction, TypedItem, TypedProgram};
7use crate::error::{KoreResult, KoreError};
8use walrus::{FunctionBuilder, InstrSeqBuilder, LocalId, Module, ModuleConfig, ValType};
9use std::collections::HashMap;
10
11pub fn generate(program: &TypedProgram) -> KoreResult<Vec<u8>> {
12    let mut compiler = WasmCompiler::new();
13    compiler.compile_program(program)?;
14    Ok(compiler.module.emit_wasm())
15}
16
/// Translates a typed program into a walrus WASM `Module`.
///
/// Compilation runs as several passes over the program (layout computation,
/// string interning, lambda lifting, declarations, then bodies); this struct
/// carries the state those passes share.
struct WasmCompiler {
    module: Module,
    /// Map function names to their WASM function IDs for call resolution
    functions: HashMap<String, walrus::FunctionId>,
    /// Memory ID for linear memory (set in `new`, so always `Some` afterwards)
    memory_id: Option<walrus::MemoryId>,
    /// Mutable global holding the bump allocator's next free address
    heap_ptr_global: walrus::GlobalId,
    /// Current offset in data segment for string allocation
    data_offset: u32,
    /// Map string literals to their memory offset (for deduplication)
    string_table: HashMap<String, u32>,
    /// Struct layouts: struct_name -> (field_name -> offset, total_size)
    struct_layouts: HashMap<String, (HashMap<String, u32>, u32)>,
    /// Enum layouts: enum_name -> (variant_name -> tag, max_payload_size, variant_name -> (field_name -> offset))
    enum_layouts: HashMap<String, (HashMap<String, u32>, u32, HashMap<String, HashMap<String, u32>>)>,
    /// Funcref table for indirect calls (closures); lambda ids index into it
    funcref_table: Option<walrus::TableId>,
    /// Counter for generating unique lambda ids and names
    lambda_counter: u32,
    /// Map lambda ID -> (table_index, func_id) for indirect calls
    lambda_table: HashMap<u32, (u32, walrus::FunctionId)>,
}
41
// Separate Context from Builder to avoid self-borrow issues.
// Locals are pre-allocated, so we don't need mutable access during emission.
/// Read-mostly view of compiler state handed to the instruction emitters.
struct CompilationContext<'a> {
    /// Named locals (parameters and `let` bindings) for the current function
    locals: HashMap<String, LocalId>,
    /// Function name -> id, for resolving direct calls
    functions: &'a HashMap<String, walrus::FunctionId>,
    /// Interned string literal -> data-segment offset
    string_table: &'a HashMap<String, u32>,
    /// Struct name -> (field offsets, total size)
    struct_layouts: &'a HashMap<String, (HashMap<String, u32>, u32)>,
    /// Enum name -> (variant tags, max payload size, per-variant field offsets)
    enum_layouts: &'a HashMap<String, (HashMap<String, u32>, u32, HashMap<String, HashMap<String, u32>>)>,
    /// The module's single linear memory
    memory_id: walrus::MemoryId,
    /// Global holding the bump-allocator pointer
    heap_ptr_global: walrus::GlobalId,
    /// Scratch locals shared by the emitters
    tmp_i32: LocalId,
    tmp_i32_2: LocalId,
    tmp_i64: LocalId,
    /// Funcref table used for indirect (lambda) calls
    funcref_table: Option<walrus::TableId>,
    /// Lambda id -> (table index, function id)
    lambda_table: &'a HashMap<u32, (u32, walrus::FunctionId)>,
}
58
59impl WasmCompiler {
60    fn new() -> Self {
61        let config = ModuleConfig::new();
62        let mut module = Module::with_config(config);
63        
64        // Create linear memory (1 page = 64KB)
65        // add_local(shared, memory64, initial, maximum, page_size_log2)
66        let memory_id = module.memories.add_local(false, false, 1, None, None);
67        module.exports.add("memory", memory_id);
68
69        let heap_ptr = 4096u32;
70        let heap_ptr_global = module.globals.add_local(
71            ValType::I32,
72            true,
73            false, // shared
74            walrus::ConstExpr::Value(walrus::ir::Value::I32(heap_ptr as i32)),
75        );
76        
77        // --- WASM Host Imports for I/O ---
78        let mut functions = HashMap::new();
79        
80        // print_i64(value: i64) -> void
81        let print_i64_type = module.types.add(&[ValType::I64], &[]);
82        let (print_i64_func, _) = module.add_import_func("host", "print_i64", print_i64_type);
83        functions.insert("print_i64".to_string(), print_i64_func);
84        
85        // print_f64(value: f64) -> void
86        let print_f64_type = module.types.add(&[ValType::F64], &[]);
87        let (print_f64_func, _) = module.add_import_func("host", "print_f64", print_f64_type);
88        functions.insert("print_f64".to_string(), print_f64_func);
89        
90        // print_str(ptr: i32, len: i32) -> void
91        let print_str_type = module.types.add(&[ValType::I32, ValType::I32], &[]);
92        let (print_str_func, _) = module.add_import_func("host", "print_str", print_str_type);
93        functions.insert("print_str".to_string(), print_str_func);
94        
95        // print_bool(value: i32) -> void  
96        let print_bool_type = module.types.add(&[ValType::I32], &[]);
97        let (print_bool_func, _) = module.add_import_func("host", "print_bool", print_bool_type);
98        functions.insert("print_bool".to_string(), print_bool_func);
99        
100        // read_i64() -> i64
101        let read_i64_type = module.types.add(&[], &[ValType::I64]);
102        let (read_i64_func, _) = module.add_import_func("host", "read_i64", read_i64_type);
103        functions.insert("read_i64".to_string(), read_i64_func);
104
105        // int_to_str(val: i64) -> ptr: i32
106        let int_to_str_type = module.types.add(&[ValType::I64], &[ValType::I32]);
107        let (int_to_str_func, _) = module.add_import_func("host", "int_to_str", int_to_str_type);
108        functions.insert("int_to_str".to_string(), int_to_str_func);
109
110        // str_concat(ptr1: i32, len1: i32, ptr2: i32, len2: i32) -> ptr: i32
111        // Note: For simplicity, we'll assume strings are just pointers in this specific hack, 
112        // but robustly we need lengths.
113        // If our runtime strings are (ptr, len), we can't easily pass them as single values.
114        // Let's assume the host handles "String Objects" via pointers for concatenation.
115        // BUT `print_str` takes (ptr, len). 
116        // Let's change strategy: strings are pointers to [len: i32, data...].
117        // So we just pass pointers.
118        let str_concat_type = module.types.add(&[ValType::I32, ValType::I32], &[ValType::I32]);
119        let (str_concat_func, _) = module.add_import_func("host", "str_concat", str_concat_type);
120        functions.insert("str_concat".to_string(), str_concat_func);
121
122        // time_now() -> i64
123        let time_now_type = module.types.add(&[], &[ValType::I64]);
124        let (time_now_func, _) = module.add_import_func("host", "time_now", time_now_type);
125        functions.insert("time_now".to_string(), time_now_func);
126
127        // --- DOM Imports ---
128        // dom_create(tag_ptr: i32, tag_len: i32) -> node_id: i32
129        let dom_create_type = module.types.add(&[ValType::I32, ValType::I32], &[ValType::I32]);
130        let (dom_create_func, _) = module.add_import_func("host", "dom_create", dom_create_type);
131        functions.insert("dom_create".to_string(), dom_create_func);
132
133        // dom_append(parent_id: i32, child_id: i32) -> void
134        let dom_append_type = module.types.add(&[ValType::I32, ValType::I32], &[]);
135        let (dom_append_func, _) = module.add_import_func("host", "dom_append", dom_append_type);
136        functions.insert("dom_append".to_string(), dom_append_func);
137
138        // dom_attr(node_id: i32, key_ptr: i32, key_len: i32, val_ptr: i32, val_len: i32) -> void
139        let dom_attr_type = module.types.add(&[ValType::I32, ValType::I32, ValType::I32, ValType::I32, ValType::I32], &[]);
140        let (dom_attr_func, _) = module.add_import_func("host", "dom_attr", dom_attr_type);
141        functions.insert("dom_attr".to_string(), dom_attr_func);
142        
143        // dom_text(text_ptr: i32, text_len: i32) -> node_id: i32
144        let dom_text_type = module.types.add(&[ValType::I32, ValType::I32], &[ValType::I32]);
145        let (dom_text_func, _) = module.add_import_func("host", "dom_text", dom_text_type);
146        functions.insert("dom_text".to_string(), dom_text_func);
147        
148        // Create funcref table for closures/lambdas
149        // Starts with 16 slots, can grow as needed
150        let funcref_table = module.tables.add_local(false, 16, Some(256), walrus::RefType::Funcref);
151        
152        Self {
153            module,
154            functions,
155            memory_id: Some(memory_id),
156            heap_ptr_global,
157            data_offset: 0,
158            string_table: HashMap::new(),
159            struct_layouts: HashMap::new(),
160            enum_layouts: HashMap::new(),
161            // heap_ptr, // Unused
162            funcref_table: Some(funcref_table),
163            lambda_counter: 0,
164            lambda_table: HashMap::new(),
165        }
166    }
167
168    fn compile_program(&mut self, program: &TypedProgram) -> KoreResult<()> {
169        // First pass: collect struct layouts
170        for item in &program.items {
171            if let TypedItem::Struct(s) = item {
172                self.compute_struct_layout(s);
173            }
174            if let TypedItem::Component(c) = item {
175                self.compute_component_layout(c);
176            }
177        }
178
179        // Second pass: collect enum layouts
180        for item in &program.items {
181            if let TypedItem::Enum(e) = item {
182                self.compute_enum_layout(e);
183            }
184        }
185        
186        // Third pass: collect all string literals
187        for item in &program.items {
188            if let TypedItem::Function(f) = item {
189                self.collect_strings_in_block(&f.ast.body);
190            }
191        }
192
193        // Fourth pass: collect and compile all lambdas
194        let mut all_lambdas = Vec::new();
195        for item in &program.items {
196            if let TypedItem::Function(f) = item {
197                self.collect_lambdas_in_block(&f.ast.body, &mut all_lambdas);
198            }
199        }
200        // Compile each lambda to a WASM function
201        for (id, params, body) in all_lambdas {
202            self.compile_lambda(id, &params, &body)?;
203        }
204
205        // Fifth pass: declare functions (recursion support)
206        for item in &program.items {
207            if let TypedItem::Function(f) = item {
208                self.declare_function(f)?;
209            }
210        }
211        
212        // Fifth pass: compile function bodies
213        for item in &program.items {
214            match item {
215                TypedItem::Function(f) => {
216                    self.compile_function_body(f)?;
217                }
218                _ => {} 
219            }
220        }
221        
222        // Sixth pass: compile components
223        for item in &program.items {
224            if let TypedItem::Component(c) = item {
225                self.compile_component(c)?;
226            }
227        }
228        
229        Ok(())
230    }
231    
232    fn compute_struct_layout(&mut self, s: &crate::types::TypedStruct) {
233        let mut offset = 0u32;
234        let mut field_offsets = HashMap::new();
235        
236        for field in &s.ast.fields {
237            // Align to 4 bytes
238            offset = (offset + 3) & !3;
239            field_offsets.insert(field.name.clone(), offset);
240            
241            // Calculate field size based on type
242            let field_size = self.type_size_of(&s.field_types.get(&field.name).cloned().unwrap_or(ResolvedType::Int(crate::types::IntSize::I64)));
243            offset += field_size;
244        }
245        
246        // Align total size to 4 bytes
247        let total_size = (offset + 3) & !3;
248        self.struct_layouts.insert(s.ast.name.clone(), (field_offsets, total_size));
249    }
250
251    fn compute_component_layout(&mut self, c: &crate::types::TypedComponent) {
252        let mut offset = 0u32;
253        let mut field_offsets = HashMap::new();
254        
255        for state in &c.ast.state {
256            // Align to 4 bytes
257            offset = (offset + 3) & !3;
258            field_offsets.insert(state.name.clone(), offset);
259            
260            // Assume 8 bytes for now
261            offset += 8;
262        }
263        
264        let total_size = (offset + 3) & !3;
265        self.struct_layouts.insert(c.ast.name.clone(), (field_offsets, total_size));
266    }
267
268    fn compile_component(&mut self, c: &crate::types::TypedComponent) -> KoreResult<()> {
269        let render_name = format!("{}_render", c.ast.name);
270        
271        // Params: self (i32)
272        // Ret: VNode (i32)
273        let wasm_params = vec![ValType::I32];
274        let wasm_results = vec![ValType::I32];
275        
276        let mut builder = FunctionBuilder::new(&mut self.module.types, &wasm_params, &wasm_results);
277        let self_local = self.module.locals.add(ValType::I32);
278        
279        // Locals
280        let tmp_i32 = self.module.locals.add(ValType::I32);
281        let tmp_i32_2 = self.module.locals.add(ValType::I32);
282        let tmp_i64 = self.module.locals.add(ValType::I64);
283        
284        let mut locals_map = HashMap::new();
285        locals_map.insert("self".to_string(), self_local);
286        
287        let ctx = CompilationContext {
288            locals: locals_map,
289            functions: &self.functions,
290            string_table: &self.string_table,
291            struct_layouts: &self.struct_layouts,
292            enum_layouts: &self.enum_layouts,
293            memory_id: self.memory_id.unwrap(),
294            heap_ptr_global: self.heap_ptr_global,
295            tmp_i32,
296            tmp_i32_2,
297            tmp_i64,
298            funcref_table: self.funcref_table,
299            lambda_table: &self.lambda_table,
300        };
301        
302        let mut func_body = builder.func_body();
303        self.compile_jsx_node(&ctx, &mut func_body, &c.ast.body)?;
304        
305        let func_id = builder.finish(vec![self_local], &mut self.module.funcs);
306        self.functions.insert(render_name.clone(), func_id);
307        self.module.exports.add(&render_name, func_id);
308        
309        Ok(())
310    }
311
312    fn compute_enum_layout(&mut self, e: &crate::types::TypedEnum) {
313        let mut variant_tags = HashMap::new();
314        let mut max_payload_size = 0u32;
315        let mut variant_field_offsets = HashMap::new();
316
317        for (idx, variant) in e.ast.variants.iter().enumerate() {
318            variant_tags.insert(variant.name.clone(), idx as u32);
319            let mut field_offsets = HashMap::new();
320            let mut payload_size = 0u32;
321
322            if let Some(payload_types) = e.variant_payload_types.get(&variant.name) {
323                let mut current_offset = 0u32;
324                
325                // Determine offsets based on variant type
326                match &variant.fields {
327                    crate::ast::VariantFields::Struct(fields) => {
328                         for (i, field) in fields.iter().enumerate() {
329                             if let Some(ty) = payload_types.get(i) {
330                                 // Align to 4 bytes for simplicity (WASM is 32-bit mostly)
331                                 current_offset = (current_offset + 3) & !3;
332                                 field_offsets.insert(field.name.clone(), current_offset);
333                                 
334                                 let size = self.type_size_of(ty);
335                                 current_offset += size;
336                             }
337                         }
338                    }
339                    crate::ast::VariantFields::Tuple(_) => {
340                         for (i, ty) in payload_types.iter().enumerate() {
341                             current_offset = (current_offset + 3) & !3;
342                             field_offsets.insert(i.to_string(), current_offset);
343                             current_offset += self.type_size_of(ty);
344                         }
345                    }
346                    crate::ast::VariantFields::Unit => {}
347                }
348                
349                // Align final size
350                payload_size = (current_offset + 3) & !3;
351            }
352
353            variant_field_offsets.insert(variant.name.clone(), field_offsets);
354            max_payload_size = max_payload_size.max(payload_size);
355        }
356
357        self.enum_layouts
358            .insert(e.ast.name.clone(), (variant_tags, max_payload_size, variant_field_offsets));
359    }
360    
361    fn type_size_of(&self, ty: &ResolvedType) -> u32 {
362        match ty {
363            ResolvedType::Unit => 0,
364            ResolvedType::Bool => 4,
365            ResolvedType::Int(crate::types::IntSize::I8) | ResolvedType::Int(crate::types::IntSize::U8) => 1,
366            ResolvedType::Int(crate::types::IntSize::I16) | ResolvedType::Int(crate::types::IntSize::U16) => 2,
367            ResolvedType::Int(crate::types::IntSize::I32) | ResolvedType::Int(crate::types::IntSize::U32) => 4,
368            ResolvedType::Int(crate::types::IntSize::I64) | ResolvedType::Int(crate::types::IntSize::U64) | ResolvedType::Int(crate::types::IntSize::Isize) | ResolvedType::Int(crate::types::IntSize::Usize) => 8,
369            ResolvedType::Float(crate::types::FloatSize::F32) => 4,
370            ResolvedType::Float(crate::types::FloatSize::F64) => 8,
371            ResolvedType::String => 4, // pointer
372            ResolvedType::Char => 4,
373            ResolvedType::Array(_, len) => 4 + (*len as u32 * 8), // pointer + inline storage
374            ResolvedType::Struct(_, _) => 4, // pointer
375            _ => 8, // default to 8 bytes
376        }
377    }
378    
379    /// Emit bump allocator: allocates `size` bytes, returns pointer to start
380    /// Stack effect: [] -> [i32 pointer]
381    /// 
382    /// Algorithm:
383    ///   old_ptr = heap_ptr
384    ///   heap_ptr = (heap_ptr + size + 7) & ~7  // 8-byte aligned
385    ///   return old_ptr
386    fn emit_alloc(&self, ctx: &CompilationContext, builder: &mut InstrSeqBuilder, size: u32) {
387        // Get current heap pointer (this will be our return value)
388        builder.global_get(ctx.heap_ptr_global);
389        
390        // Compute new heap pointer: (heap_ptr + size + 7) & ~7
391        builder.global_get(ctx.heap_ptr_global);
392        builder.i32_const(size as i32);
393        builder.binop(walrus::ir::BinaryOp::I32Add);
394        builder.i32_const(7);
395        builder.binop(walrus::ir::BinaryOp::I32Add);
396        builder.i32_const(-8); // ~7 in two's complement
397        builder.binop(walrus::ir::BinaryOp::I32And);
398        
399        // Store new heap pointer
400        builder.global_set(ctx.heap_ptr_global);
401        
402        // Stack now has: [old_ptr] - which is our allocated address
403    }
404    
405    fn collect_strings_in_block(&mut self, block: &Block) {
406        for stmt in &block.stmts {
407            self.collect_strings_in_stmt(stmt);
408        }
409    }
410    
411    fn collect_strings_in_stmt(&mut self, stmt: &Stmt) {
412        match stmt {
413            Stmt::Expr(expr) => self.collect_strings_in_expr(expr),
414            Stmt::Let { value: Some(expr), .. } => self.collect_strings_in_expr(expr),
415            Stmt::Return(Some(expr), _) => self.collect_strings_in_expr(expr),
416            Stmt::While { condition, body, .. } => {
417                self.collect_strings_in_expr(condition);
418                self.collect_strings_in_block(body);
419            }
420            _ => {}
421        }
422    }
423    
424    fn collect_strings_in_expr(&mut self, expr: &Expr) {
425        match expr {
426            Expr::String(s, _) => {
427                self.allocate_string(s);
428            }
429            Expr::Binary { left, right, .. } => {
430                self.collect_strings_in_expr(left);
431                self.collect_strings_in_expr(right);
432            }
433            Expr::Call { args, .. } => {
434                for arg in args {
435                    self.collect_strings_in_expr(&arg.value);
436                }
437            }
438            Expr::If { condition, then_branch, else_branch, .. } => {
439                self.collect_strings_in_expr(condition);
440                self.collect_strings_in_block(then_branch);
441                if let Some(else_br) = else_branch {
442                    self.collect_strings_in_else(else_br);
443                }
444            }
445            Expr::JSX(node, _) => {
446                self.collect_strings_in_jsx(node);
447            }
448            _ => {}
449        }
450    }
451    
452    fn collect_strings_in_jsx(&mut self, node: &crate::ast::JSXNode) {
453        match node {
454            crate::ast::JSXNode::Element { tag, attributes, children, .. } => {
455                self.allocate_string(tag);
456                for attr in attributes {
457                    self.allocate_string(&attr.name);
458                    match &attr.value {
459                        crate::ast::JSXAttrValue::String(s) => { self.allocate_string(s); },
460                        crate::ast::JSXAttrValue::Expr(e) => self.collect_strings_in_expr(e),
461                        _ => {}
462                    }
463                }
464                for child in children {
465                    self.collect_strings_in_jsx(child);
466                }
467            }
468            crate::ast::JSXNode::Text(s, _) => {
469                self.allocate_string(s);
470            }
471            crate::ast::JSXNode::Expression(e) => {
472                self.collect_strings_in_expr(e);
473            }
474            crate::ast::JSXNode::ComponentCall { name, props, children, .. } => {
475                // Name might not be a string literal in runtime, but let's alloc it anyway
476                self.allocate_string(name);
477                for attr in props {
478                    self.allocate_string(&attr.name);
479                    match &attr.value {
480                        crate::ast::JSXAttrValue::String(s) => { self.allocate_string(s); },
481                        crate::ast::JSXAttrValue::Expr(e) => self.collect_strings_in_expr(e),
482                        _ => {}
483                    }
484                }
485                for child in children {
486                    self.collect_strings_in_jsx(child);
487                }
488            }
489            crate::ast::JSXNode::Fragment(children, _) => {
490                for child in children {
491                    self.collect_strings_in_jsx(child);
492                }
493            }
494            crate::ast::JSXNode::For { iter, body, .. } => {
495                self.collect_strings_in_expr(iter);
496                self.collect_strings_in_jsx(body);
497            }
498            crate::ast::JSXNode::If { condition, then_branch, else_branch, .. } => {
499                self.collect_strings_in_expr(condition);
500                self.collect_strings_in_jsx(then_branch);
501                if let Some(else_br) = else_branch {
502                    self.collect_strings_in_jsx(else_br);
503                }
504            }
505        }
506    }
507    
508    fn collect_strings_in_else(&mut self, branch: &crate::ast::ElseBranch) {
509        match branch {
510            crate::ast::ElseBranch::Else(block) => self.collect_strings_in_block(block),
511            crate::ast::ElseBranch::ElseIf(cond, then, next) => {
512                self.collect_strings_in_expr(cond);
513                self.collect_strings_in_block(then);
514                if let Some(next_br) = next {
515                    self.collect_strings_in_else(next_br);
516                }
517            }
518        }
519    }
520
521    // === LAMBDA COLLECTION AND COMPILATION ===
522
523    fn collect_lambdas_in_block(&mut self, block: &Block, lambdas: &mut Vec<(u32, Vec<crate::ast::Param>, Expr)>) {
524        for stmt in &block.stmts {
525            self.collect_lambdas_in_stmt(stmt, lambdas);
526        }
527    }
528
529    fn collect_lambdas_in_stmt(&mut self, stmt: &Stmt, lambdas: &mut Vec<(u32, Vec<crate::ast::Param>, Expr)>) {
530        match stmt {
531            Stmt::Expr(expr) => self.collect_lambdas_in_expr(expr, lambdas),
532            Stmt::Let { value: Some(expr), .. } => self.collect_lambdas_in_expr(expr, lambdas),
533            Stmt::Return(Some(expr), _) => self.collect_lambdas_in_expr(expr, lambdas),
534            Stmt::While { condition, body, .. } => {
535                self.collect_lambdas_in_expr(condition, lambdas);
536                self.collect_lambdas_in_block(body, lambdas);
537            }
538            Stmt::For { iter, body, .. } => {
539                self.collect_lambdas_in_expr(iter, lambdas);
540                self.collect_lambdas_in_block(body, lambdas);
541            }
542            Stmt::Loop { body, .. } => {
543                self.collect_lambdas_in_block(body, lambdas);
544            }
545            _ => {}
546        }
547    }
548
549    fn collect_lambdas_in_expr(&mut self, expr: &Expr, lambdas: &mut Vec<(u32, Vec<crate::ast::Param>, Expr)>) {
550        match expr {
551            Expr::Lambda { params, body, .. } => {
552                let id = self.lambda_counter;
553                self.lambda_counter += 1;
554                lambdas.push((id, params.clone(), (**body).clone()));
555                // Also collect nested lambdas in body
556                self.collect_lambdas_in_expr(body, lambdas);
557            }
558            Expr::Binary { left, right, .. } => {
559                self.collect_lambdas_in_expr(left, lambdas);
560                self.collect_lambdas_in_expr(right, lambdas);
561            }
562            Expr::Unary { operand, .. } => {
563                self.collect_lambdas_in_expr(operand, lambdas);
564            }
565            Expr::Call { callee, args, .. } => {
566                self.collect_lambdas_in_expr(callee, lambdas);
567                for arg in args {
568                    self.collect_lambdas_in_expr(&arg.value, lambdas);
569                }
570            }
571            Expr::MethodCall { receiver, args, .. } => {
572                self.collect_lambdas_in_expr(receiver, lambdas);
573                for arg in args {
574                    self.collect_lambdas_in_expr(&arg.value, lambdas);
575                }
576            }
577            Expr::If { condition, then_branch, else_branch, .. } => {
578                self.collect_lambdas_in_expr(condition, lambdas);
579                self.collect_lambdas_in_block(then_branch, lambdas);
580                if let Some(else_br) = else_branch {
581                    self.collect_lambdas_in_else_branch(else_br, lambdas);
582                }
583            }
584            Expr::Match { scrutinee, arms, .. } => {
585                self.collect_lambdas_in_expr(scrutinee, lambdas);
586                for arm in arms {
587                    self.collect_lambdas_in_expr(&arm.body, lambdas);
588                }
589            }
590            Expr::Array(elements, _) => {
591                for e in elements {
592                    self.collect_lambdas_in_expr(e, lambdas);
593                }
594            }
595            Expr::Tuple(elements, _) => {
596                for e in elements {
597                    self.collect_lambdas_in_expr(e, lambdas);
598                }
599            }
600            Expr::Block(block, _) => {
601                self.collect_lambdas_in_block(block, lambdas);
602            }
603            _ => {}
604        }
605    }
606
607    fn collect_lambdas_in_else_branch(&mut self, branch: &crate::ast::ElseBranch, lambdas: &mut Vec<(u32, Vec<crate::ast::Param>, Expr)>) {
608        match branch {
609            crate::ast::ElseBranch::Else(block) => self.collect_lambdas_in_block(block, lambdas),
610            crate::ast::ElseBranch::ElseIf(cond, then, next) => {
611                self.collect_lambdas_in_expr(cond, lambdas);
612                self.collect_lambdas_in_block(then, lambdas);
613                if let Some(next_br) = next {
614                    self.collect_lambdas_in_else_branch(next_br, lambdas);
615                }
616            }
617        }
618    }
619
    /// Compile a collected lambda into a WASM function and add to funcref table.
    ///
    /// Calling convention: every parameter and the return value are uniformly
    /// i64 (lambdas are dynamically dispatched through the table, so a single
    /// signature is used for all of them). The lambda's id doubles as its
    /// index in the funcref table.
    ///
    /// NOTE(review): the funcref table is created with 16 initial slots
    /// (max 256). An active element segment at index >= 16 would make the
    /// emitted module invalid — confirm the table is grown (or sized from the
    /// lambda count) before more than 16 lambdas are compiled.
    fn compile_lambda(&mut self, id: u32, params: &[crate::ast::Param], body: &Expr) -> KoreResult<()> {
        // Uniform signature: all params i64, returns i64.
        let wasm_params: Vec<ValType> = params.iter().map(|_| ValType::I64).collect();
        let wasm_results = vec![ValType::I64];
        
        let mut builder = FunctionBuilder::new(&mut self.module.types, &wasm_params, &wasm_results);
        
        // Pre-allocate one local per parameter and record it by name so the
        // body compiler can resolve identifier references.
        let mut locals = HashMap::new();
        let mut param_local_ids = Vec::new();
        for (i, param) in params.iter().enumerate() {
            let local_id = self.module.locals.add(wasm_params[i]);
            locals.insert(param.name.clone(), local_id);
            param_local_ids.push(local_id);
        }
        
        // Scratch locals required by the expression compiler.
        let tmp_i32 = self.module.locals.add(ValType::I32);
        let tmp_i32_2 = self.module.locals.add(ValType::I32);
        let tmp_i64 = self.module.locals.add(ValType::I64);
        
        let ctx = CompilationContext {
            locals,
            functions: &self.functions,
            string_table: &self.string_table,
            struct_layouts: &self.struct_layouts,
            enum_layouts: &self.enum_layouts,
            memory_id: self.memory_id.unwrap(),
            heap_ptr_global: self.heap_ptr_global,
            tmp_i32,
            tmp_i32_2,
            tmp_i64,
            funcref_table: self.funcref_table,
            lambda_table: &self.lambda_table,
        };
        
        // Compile the lambda body; its result is the function's return value.
        let mut func_body = builder.func_body();
        self.compile_expr(&ctx, &mut func_body, body)?;
        
        // Finalize the function in the module.
        let func_id = builder.finish(param_local_ids, &mut self.module.funcs);
        
        // Install the function into the funcref table via an active element
        // segment at slot `id`, so `call_indirect` can reach it.
        let table_index = id; // Use lambda ID as table index
        if let Some(table_id) = self.funcref_table {
            self.module.elements.add(
                walrus::ElementKind::Active {
                    table: table_id,
                    offset: walrus::ConstExpr::Value(walrus::ir::Value::I32(table_index as i32)),
                },
                walrus::ElementItems::Functions(vec![func_id]),
            );
        }
        
        // Record the mapping for lookup while compiling call sites.
        self.lambda_table.insert(id, (table_index, func_id));
        
        // Also expose the lambda under a synthetic name in the function map.
        let lambda_name = format!("__lambda_{}", id);
        self.functions.insert(lambda_name, func_id);
        
        Ok(())
    }
685
686    fn declare_function(&mut self, func: &TypedFunction) -> KoreResult<()> {
687        let (param_types, ret_type) = if let ResolvedType::Function { params, ret, .. } = &func.resolved_type {
688            (params, ret)
689        } else {
690            return Err(KoreError::codegen("Expected function type", func.ast.span));
691        };
692
693        let wasm_params: Vec<ValType> = param_types.iter().map(|t| self.map_type(t)).collect();
694        let wasm_results = if **ret_type == ResolvedType::Unit {
695            vec![]
696        } else {
697            vec![self.map_type(ret_type)]
698        };
699
700        // Use FunctionBuilder to create the function correctly with empty body
701        let builder = FunctionBuilder::new(&mut self.module.types, &wasm_params, &wasm_results);
702        
703        // Create parameter locals manually to pass to finish
704        let mut param_local_ids = Vec::new();
705        for &param_type in &wasm_params {
706            param_local_ids.push(self.module.locals.add(param_type));
707        }
708
709        let func_id = builder.finish(param_local_ids, &mut self.module.funcs);
710        self.functions.insert(func.ast.name.clone(), func_id);
711
712        if matches!(func.ast.visibility, crate::ast::Visibility::Public) {
713            self.module.exports.add(&func.ast.name, func_id);
714        }
715
716        Ok(())
717    }
718
    /// Compile the body of `func` into the function slot reserved by
    /// `declare_function`.
    ///
    /// walrus cannot attach a body to an already-finished function, so this
    /// builds a brand-new temporary function containing the compiled body and
    /// then swaps its `FunctionKind` into the reserved function — keeping the
    /// original `FunctionId` that call sites and exports already reference.
    fn compile_function_body(&mut self, func: &TypedFunction) -> KoreResult<()> {
        // Reserved ID from the declaration pass; all call sites point here.
        let func_id = *self.functions.get(&func.ast.name).unwrap();

        let (param_types, ret_type) = if let ResolvedType::Function { params, ret, .. } = &func.resolved_type {
            (params, ret)
        } else {
            return Ok(()); // Should have failed in declare
        };

        let wasm_params: Vec<ValType> = param_types.iter().map(|t| self.map_type(t)).collect();
        let wasm_results = if **ret_type == ResolvedType::Unit {
            vec![]
        } else {
            vec![self.map_type(ret_type)]
        };

        let mut builder = FunctionBuilder::new(&mut self.module.types, &wasm_params, &wasm_results);
        
        let mut text_locals_map = HashMap::new();
        let mut param_local_ids = Vec::new();

        // 1. Argument Locals: map each source parameter name to a fresh local.
        for (i, param) in func.ast.params.iter().enumerate() {
            let local_id = self.module.locals.add(wasm_params[i]);
            text_locals_map.insert(param.name.clone(), local_id);
            param_local_ids.push(local_id);
        }
        
        // 2. Scan body for Let/For bindings and pre-allocate locals, so
        //    emission never needs mutable access to self.module.locals.
        self.preallocate_locals(&func.ast.body, &mut text_locals_map);

        // Scratch locals shared by expression lowering (e.g. string prints).
        let tmp_i32 = self.module.locals.add(ValType::I32);
        let tmp_i32_2 = self.module.locals.add(ValType::I32);
        let tmp_i64 = self.module.locals.add(ValType::I64);

        // Immutable snapshot of everything emission needs; avoids borrowing
        // self mutably while walking the instruction builder.
        let ctx = CompilationContext {
            locals: text_locals_map,
            functions: &self.functions,
            string_table: &self.string_table,
            struct_layouts: &self.struct_layouts,
            enum_layouts: &self.enum_layouts,
            memory_id: self.memory_id.unwrap(),
            heap_ptr_global: self.heap_ptr_global,
            tmp_i32,
            tmp_i32_2,
            tmp_i64,
            funcref_table: self.funcref_table,
            lambda_table: &self.lambda_table,
        };

        // 3. Compile body
        let mut func_body = builder.func_body();
        self.compile_block(&ctx, &mut func_body, &func.ast.body)?;
        
        // Empty body but non-unit result: push a typed zero so the function
        // still validates against its declared result type.
        if func.ast.body.stmts.is_empty() && !wasm_results.is_empty() {
             match wasm_results[0] {
                 ValType::I64 => func_body.i64_const(0),
                 ValType::I32 => func_body.i32_const(0),
                 ValType::F64 => func_body.f64_const(0.0),
                 ValType::F32 => func_body.f32_const(0.0),
                 _ => func_body.i64_const(0),
             };
        }

        // Finish the builder to get a NEW function ID with the compiled body
        let temp_func_id = builder.finish(param_local_ids, &mut self.module.funcs);

        // 4. Move body from temp function to the reserved function.
        // mem::replace needs a value to leave behind, so we fabricate a dummy
        // Import kind, derived from a dummy Global import to avoid circular
        // dependencies with Function imports.
        
        let dummy_type = self.module.types.add(&[], &[]);
        let (_dummy_global_id, dummy_import_id) = self.module.add_import_global("KORE_internal", "dummy", ValType::I32, false, false);
        
        let dummy_kind = walrus::FunctionKind::Import(walrus::ImportedFunction {
            import: dummy_import_id,
            ty: dummy_type,
        });

        // Swap out the new body from temp_func, leaving the dummy behind.
        let new_func = self.module.funcs.get_mut(temp_func_id);
        let new_kind = std::mem::replace(&mut new_func.kind, dummy_kind);
        
        // Swap in the new body to the old (reserved) function.
        let old_func = self.module.funcs.get_mut(func_id);
        let _old_kind = std::mem::replace(&mut old_func.kind, new_kind);

        // Clean up the temporary function and the dummy import entry.
        self.module.funcs.delete(temp_func_id);
        self.module.imports.delete(dummy_import_id);
        // NOTE(review): _dummy_global_id is never deleted, so one dummy global
        // leaks per compiled function — confirm whether walrus allows
        // self.module.globals.delete(_dummy_global_id) here.
        
        Ok(())
    }
814
815    fn preallocate_locals(&mut self, block: &Block, locals: &mut HashMap<String, LocalId>) {
816        for stmt in &block.stmts {
817            match stmt {
818                Stmt::Let { pattern, .. } => {
819                     // Recursively find bindings
820                     if let crate::ast::Pattern::Binding { name, .. } = pattern {
821                        if !locals.contains_key(name) {
822                            let local = self.module.locals.add(ValType::I64); // Assume I64 for now
823                            locals.insert(name.clone(), local);
824                        }
825                     }
826                }
827                Stmt::While { body, .. } => {
828                    self.preallocate_locals(body, locals);
829                }
830                Stmt::For { binding, body, .. } => {
831                    // Allocate loop variable
832                    if let crate::ast::Pattern::Binding { name, .. } = binding {
833                        if !locals.contains_key(name) {
834                            let local = self.module.locals.add(ValType::I64);
835                            locals.insert(name.clone(), local);
836                        }
837                    }
838                    self.preallocate_locals(body, locals);
839                }
840                Stmt::Loop { body, .. } => {
841                    self.preallocate_locals(body, locals);
842                }
843                _ => {}
844            }
845        }
846    }
847
848    fn map_type(&self, ty: &ResolvedType) -> ValType {
849        match ty {
850            ResolvedType::Int(_) => ValType::I64,
851            ResolvedType::Float(_) => ValType::F64,
852            ResolvedType::Bool => ValType::I32,
853            ResolvedType::String => ValType::I32, // Strings are pointers (i32 offset)
854            _ => ValType::I64, 
855        }
856    }
857    
858    /// Allocate a string literal in the data segment
859    /// Returns the memory offset where the string starts
860    /// Format: [length: 4 bytes][utf8 data]
861    fn allocate_string(&mut self, s: &str) -> u32 {
862        // Check if string already allocated (deduplication)
863        if let Some(&offset) = self.string_table.get(s) {
864            return offset;
865        }
866        
867        let offset = self.data_offset;
868        let bytes = s.as_bytes();
869        let len = bytes.len() as u32;
870        
871        // Build data: length (4 bytes, little-endian) + string bytes
872        let mut data = Vec::with_capacity(4 + bytes.len());
873        data.extend_from_slice(&len.to_le_bytes());
874        data.extend_from_slice(bytes);
875        
876        // Add to data segment
877        if let Some(memory_id) = self.memory_id {
878            self.module.data.add(
879                walrus::DataKind::Active {
880                    memory: memory_id,
881                    offset: walrus::ConstExpr::Value(walrus::ir::Value::I32(offset as i32)),
882                },
883                data,
884            );
885        }
886        
887        // Update offset for next allocation
888        self.data_offset += 4 + len;
889        // Align to 4 bytes
890        self.data_offset = (self.data_offset + 3) & !3;
891        
892        // Cache for deduplication
893        self.string_table.insert(s.to_string(), offset);
894        
895        offset
896    }
897    
898    // --- Compilation Logic (Stateless regarding Module, uses passed Builder) ---
899
900    fn compile_block(&self, ctx: &CompilationContext, builder: &mut InstrSeqBuilder, block: &Block) -> KoreResult<()> {
901        for stmt in &block.stmts {
902           self.compile_stmt(ctx, builder, stmt)?;
903        }
904        Ok(())
905    }
906
907    fn compile_stmt(&self, ctx: &CompilationContext, builder: &mut InstrSeqBuilder, stmt: &Stmt) -> KoreResult<()> {
908        match stmt {
909            Stmt::Expr(expr) => {
910                self.compile_expr(ctx, builder, expr)?;
911                // Expression statements discard their result
912                builder.drop(); 
913            }
914            Stmt::Let { value, pattern, .. } => {
915                if let Some(val_expr) = value {
916                    self.compile_expr(ctx, builder, val_expr)?;
917                    if let crate::ast::Pattern::Binding { name, .. } = pattern {
918                         if let Some(local_id) = ctx.locals.get(name) {
919                             builder.local_set(*local_id);
920                         }
921                    }
922                }
923            }
924            Stmt::Return(opt_expr, _) => {
925                if let Some(expr) = opt_expr {
926                    self.compile_expr(ctx, builder, expr)?;
927                }
928                builder.return_(); 
929            }
930            Stmt::While { condition, body, .. } => {
931                builder.block(None, |block_builder| {
932                    let block_id = block_builder.id();
933                    
934                    block_builder.loop_(None, |loop_builder| {
935                        let loop_id = loop_builder.id();
936                        
937                        if self.compile_expr(ctx, loop_builder, condition).is_err() {
938                            return;
939                        }
940
941                        loop_builder.unop(walrus::ir::UnaryOp::I32Eqz);
942                        loop_builder.br_if(block_id);
943                        
944                        if self.compile_block(ctx, loop_builder, body).is_err() {
945                            return;
946                        }
947
948                        loop_builder.br(loop_id);
949                    });
950                });
951            }
952            // For loop: `for i in start..end: body`
953            // Desugars to: let i = start; while i < end: body; i = i + 1
954            Stmt::For { binding, iter, body, span: _ } => {
955                // Get the loop variable name
956                let loop_var = match binding {
957                    crate::ast::Pattern::Binding { name, .. } => name.clone(),
958                    _ => "".to_string(),
959                };
960                
961                // Get start and end from range expression
962                if let Expr::Range { start, end, inclusive, .. } = iter {
963                    let start_expr = start.as_ref().map(|e| e.as_ref());
964                    let end_expr = end.as_ref().map(|e| e.as_ref());
965                    
966                    // Initialize loop variable with start value
967                    if let Some(start_e) = start_expr {
968                        self.compile_expr(ctx, builder, start_e)?;
969                    } else {
970                        builder.i64_const(0);
971                    }
972                    
973                    if let Some(local_id) = ctx.locals.get(&loop_var) {
974                        builder.local_set(*local_id);
975                    }
976                    
977                    // block { loop { if i >= end: break; body; i++; br loop } }
978                    builder.block(None, |block_builder| {
979                        let block_id = block_builder.id();
980                        
981                        block_builder.loop_(None, |loop_builder| {
982                            let loop_id = loop_builder.id();
983                            
984                            // Check condition: i < end (or i <= end if inclusive)
985                            if let Some(local_id) = ctx.locals.get(&loop_var) {
986                                loop_builder.local_get(*local_id);
987                            }
988                            
989                            if let Some(end_e) = end_expr {
990                                if self.compile_expr(ctx, loop_builder, end_e).is_err() {
991                                    return;
992                                }
993                            } else {
994                                loop_builder.i64_const(i64::MAX);
995                            }
996                            
997                            // Compare: if i >= end (or i > end if inclusive), break
998                            if *inclusive {
999                                loop_builder.binop(walrus::ir::BinaryOp::I64GtS);
1000                            } else {
1001                                loop_builder.binop(walrus::ir::BinaryOp::I64GeS);
1002                            }
1003                            loop_builder.br_if(block_id);
1004                            
1005                            // Execute body
1006                            if self.compile_block(ctx, loop_builder, body).is_err() {
1007                                return;
1008                            }
1009                            
1010                            // Increment loop variable: i = i + 1
1011                            if let Some(local_id) = ctx.locals.get(&loop_var) {
1012                                loop_builder.local_get(*local_id);
1013                                loop_builder.i64_const(1);
1014                                loop_builder.binop(walrus::ir::BinaryOp::I64Add);
1015                                loop_builder.local_set(*local_id);
1016                            }
1017                            
1018                            loop_builder.br(loop_id);
1019                        });
1020                    });
1021                } else {
1022                    // Non-range iterators not yet supported
1023                    // For arrays: would need to get length, index each element
1024                }
1025            }
1026            // Infinite loop: `loop: body` - can be exited with break
1027            Stmt::Loop { body, span: _ } => {
1028                builder.block(None, |block_builder| {
1029                    let _block_id = block_builder.id();
1030                    
1031                    block_builder.loop_(None, |loop_builder| {
1032                        let loop_id = loop_builder.id();
1033                        
1034                        // Execute body
1035                        if self.compile_block(ctx, loop_builder, body).is_err() {
1036                            return;
1037                        }
1038                        
1039                        // Continue loop
1040                        loop_builder.br(loop_id);
1041                    });
1042                });
1043            }
1044            // Break statement
1045            Stmt::Break(_, _) => {
1046                // Break out of innermost block
1047                // Note: This is simplified - would need proper block tracking for nested loops
1048                builder.unreachable(); // Placeholder - real impl needs block ID tracking
1049            }
1050            // Continue statement  
1051            Stmt::Continue(_) => {
1052                // Jump to loop header
1053                builder.unreachable(); // Placeholder - real impl needs loop ID tracking
1054            }
1055            _ => {}
1056        }
1057        Ok(())
1058    }
1059
1060    fn is_string_expr(&self, expr: &Expr) -> bool {
1061        match expr {
1062            Expr::String(_, _) => true,
1063            Expr::Call { callee, .. } => {
1064                if let Expr::Ident(name, _) = callee.as_ref() {
1065                    name == "to_string" || name == "str_concat" 
1066                } else {
1067                    false
1068                }
1069            }
1070            Expr::Binary { op, left, right, .. } => {
1071                 match op {
1072                     BinaryOp::Add => self.is_string_expr(left) || self.is_string_expr(right),
1073                     _ => false
1074                 }
1075            }
1076            _ => false
1077        }
1078    }
1079
1080    fn compile_expr(&self, ctx: &CompilationContext, builder: &mut InstrSeqBuilder, expr: &Expr) -> KoreResult<()> {
1081        match expr {
1082            Expr::Int(n, _) => {
1083                builder.i64_const(*n);
1084            }
1085            Expr::Float(f, _) => {
1086                builder.f64_const(*f);
1087            }
1088            Expr::Bool(b, _) => {
1089                builder.i32_const(if *b { 1 } else { 0 });
1090            }
1091            Expr::String(s, span) => {
1092                // String literals are stored in data segment during pre-pass
1093                // Here we just emit the memory offset as an i32
1094                if let Some(&offset) = ctx.string_table.get(s) {
1095                    builder.i32_const((offset + 4) as i32); // Return pointer to data (skip length)
1096                } else {
1097                    return Err(KoreError::codegen("String not found in table", *span));
1098                }
1099            }
1100            Expr::Binary { left, op, right, .. } => {
1101                self.compile_expr(ctx, builder, left)?;
1102                self.compile_expr(ctx, builder, right)?;
1103                match op {
1104                    // Arithmetic
1105                    BinaryOp::Add => { 
1106                        if self.is_string_expr(left) || self.is_string_expr(right) {
1107                            if let Some(func_id) = ctx.functions.get("str_concat") {
1108                                builder.call(*func_id);
1109                            }
1110                        } else {
1111                            builder.binop(walrus::ir::BinaryOp::I64Add); 
1112                        }
1113                    },
1114                    BinaryOp::Sub => { builder.binop(walrus::ir::BinaryOp::I64Sub); },
1115                    BinaryOp::Mul => { builder.binop(walrus::ir::BinaryOp::I64Mul); },
1116                    BinaryOp::Div => { builder.binop(walrus::ir::BinaryOp::I64DivS); },
1117                    BinaryOp::Mod => { builder.binop(walrus::ir::BinaryOp::I64RemS); },
1118                    // Comparison
1119                    BinaryOp::Eq => { builder.binop(walrus::ir::BinaryOp::I64Eq); },
1120                    BinaryOp::Ne => { builder.binop(walrus::ir::BinaryOp::I64Ne); },
1121                    BinaryOp::Lt => { builder.binop(walrus::ir::BinaryOp::I64LtS); },
1122                    BinaryOp::Gt => { builder.binop(walrus::ir::BinaryOp::I64GtS); },
1123                    BinaryOp::Le => { builder.binop(walrus::ir::BinaryOp::I64LeS); },
1124                    BinaryOp::Ge => { builder.binop(walrus::ir::BinaryOp::I64GeS); },
1125                    // Logical (short-circuit would need control flow, treat as bitwise for now)
1126                    BinaryOp::And => { builder.binop(walrus::ir::BinaryOp::I64And); },
1127                    BinaryOp::Or => { builder.binop(walrus::ir::BinaryOp::I64Or); },
1128                    // Bitwise
1129                    BinaryOp::BitAnd => { builder.binop(walrus::ir::BinaryOp::I64And); },
1130                    BinaryOp::BitOr => { builder.binop(walrus::ir::BinaryOp::I64Or); },
1131                    BinaryOp::BitXor => { builder.binop(walrus::ir::BinaryOp::I64Xor); },
1132                    BinaryOp::Shl => { builder.binop(walrus::ir::BinaryOp::I64Shl); },
1133                    BinaryOp::Shr => { builder.binop(walrus::ir::BinaryOp::I64ShrS); },
1134                     _ => {}
1135                }
1136            }
1137            Expr::Unary { op, operand, .. } => {
1138                use crate::ast::UnaryOp;
1139                match op {
1140                    UnaryOp::Neg => { 
1141                        // -x = 0 - x: push 0 first, then operand, then sub
1142                        builder.i64_const(0);
1143                        self.compile_expr(ctx, builder, operand)?;
1144                        builder.binop(walrus::ir::BinaryOp::I64Sub);
1145                    },
1146                    UnaryOp::Not => {
1147                        // !x = x == 0 (logical not)
1148                        self.compile_expr(ctx, builder, operand)?;
1149                        builder.unop(walrus::ir::UnaryOp::I64Eqz);
1150                    },
1151                    UnaryOp::BitNot => {
1152                        // ~x = x xor -1
1153                        self.compile_expr(ctx, builder, operand)?;
1154                        builder.i64_const(-1);
1155                        builder.binop(walrus::ir::BinaryOp::I64Xor);
1156                    },
1157                    _ => {
1158                        // Ref, Deref - just compile operand for now
1159                        self.compile_expr(ctx, builder, operand)?;
1160                    }
1161                }
1162            }
1163            Expr::Ident(name, span) => {
1164                if let Some(local_id) = ctx.locals.get(name) {
1165                    builder.local_get(*local_id);
1166                } else {
1167                     return Err(KoreError::codegen(format!("Variable '{}' not found in locals", name), *span));
1168                }
1169            }
1170            Expr::If { condition, then_branch, else_branch, .. } => {
1171                 self.compile_expr(ctx, builder, condition)?;
1172                 
1173                 builder.if_else(
1174                    None, 
1175                    |then_builder| {
1176                        let _ = self.compile_block(ctx, then_builder, then_branch);
1177                    },
1178                    |else_builder| {
1179                        if let Some(else_br) = else_branch {
1180                            let _ = self.compile_else_branch(ctx, else_builder, else_br);
1181                        }
1182                    }
1183                 );
1184            }
1185            Expr::JSX(node, _) => {
1186                self.compile_jsx_node(ctx, builder, node)?;
1187            }
1188            Expr::Call { callee, args, span } => {
1189                // Get function name from callee
1190                if let Expr::Ident(func_name, _) = callee.as_ref() {
1191                    // Special intrinsic: print
1192                    if func_name == "print" {
1193                        for arg in args {
1194                            match &arg.value {
1195                                Expr::Int(_, _) => {
1196                                    self.compile_expr(ctx, builder, &arg.value)?;
1197                                    if let Some(func_id) = ctx.functions.get("print_i64") {
1198                                        builder.call(*func_id);
1199                                    }
1200                                }
1201                                Expr::Float(_, _) => {
1202                                    self.compile_expr(ctx, builder, &arg.value)?;
1203                                    if let Some(func_id) = ctx.functions.get("print_f64") {
1204                                        builder.call(*func_id);
1205                                    }
1206                                }
1207                                Expr::Bool(_, _) => {
1208                                    self.compile_expr(ctx, builder, &arg.value)?;
1209                                    if let Some(func_id) = ctx.functions.get("print_bool") {
1210                                        builder.call(*func_id);
1211                                    }
1212                                }
1213                                Expr::String(s, _) => {
1214                                    if let Some(&offset) = ctx.string_table.get(s) {
1215                                        builder.i32_const((offset + 4) as i32);
1216                                        builder.i32_const(s.len() as i32);
1217                                        if let Some(func_id) = ctx.functions.get("print_str") {
1218                                            builder.call(*func_id);
1219                                        }
1220                                    }
1221                                }
1222                                _ => {
1223                                    let is_string = self.is_string_expr(&arg.value);
1224                                    self.compile_expr(ctx, builder, &arg.value)?;
1225                                    if is_string {
1226                                        // ptr is on stack. Len is at ptr - 4.
1227                                        builder.local_set(ctx.tmp_i32);
1228                                        builder.local_get(ctx.tmp_i32); // ptr
1229                                        
1230                                        builder.local_get(ctx.tmp_i32);
1231                                        builder.i32_const(4);
1232                                        builder.binop(walrus::ir::BinaryOp::I32Sub);
1233                                        builder.load(ctx.memory_id, walrus::ir::LoadKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 }); // len
1234                                        
1235                                        if let Some(func_id) = ctx.functions.get("print_str") {
1236                                            builder.call(*func_id);
1237                                        }
1238                                    } else {
1239                                        if let Some(func_id) = ctx.functions.get("print_i64") {
1240                                            builder.call(*func_id);
1241                                        }
1242                                    }
1243                                }
1244                            }
1245                        }
1246                        builder.i64_const(0); // Return Unit/0
1247                        return Ok(());
1248                    }
1249
1250                    // Special intrinsic: to_string
1251                    if func_name == "to_string" {
1252                        if let Some(arg) = args.first() {
1253                             self.compile_expr(ctx, builder, &arg.value)?;
1254                             if let Some(func_id) = ctx.functions.get("int_to_str") {
1255                                 builder.call(*func_id);
1256                             }
1257                        } else {
1258                            builder.i32_const(0);
1259                        }
1260                        return Ok(());
1261                    }
1262
1263                    // Special intrinsic: now
1264                    if func_name == "now" {
1265                        if let Some(func_id) = ctx.functions.get("time_now") {
1266                            builder.call(*func_id);
1267                        }
1268                        return Ok(());
1269                    }
1270
1271                    // Look up function ID
1272                    if let Some(func_id) = ctx.functions.get(func_name) {
1273                        // Compile arguments (push onto stack)
1274                        for arg in args {
1275                            self.compile_expr(ctx, builder, &arg.value)?;
1276                        }
1277                        // Emit call instruction
1278                        builder.call(*func_id);
1279                    } else {
1280                        return Err(KoreError::codegen(format!("Function '{}' not found", func_name), *span));
1281                    }
1282                } else {
1283                    // For now, only support direct function calls by name
1284                    return Err(KoreError::codegen("Only direct function calls supported in WASM", *span));
1285                }
1286            }
1287            // Struct literal: allocate memory and initialize fields
1288            Expr::EnumVariant { enum_name, variant, fields, span } => {
1289                if let Some((tags, max_payload, field_offsets_map)) = ctx.enum_layouts.get(enum_name) {
1290                     let tag = *tags.get(variant).ok_or_else(|| KoreError::codegen("Variant tag not found", *span))?;
1291                     
1292                     // 4 bytes tag + payload
1293                     let total_size = 4 + max_payload;
1294                     self.emit_alloc(ctx, builder, total_size);
1295                     // Stack: [base_ptr]
1296                     
1297                     // Drop base_ptr to recompute for stores
1298                     builder.drop();
1299
1300                     // Store tag at offset 0
1301                     let aligned_size = (total_size + 7) & !7;
1302                     
1303                     builder.global_get(ctx.heap_ptr_global);
1304                     builder.i32_const(aligned_size as i32);
1305                     builder.binop(walrus::ir::BinaryOp::I32Sub);
1306                     
1307                     builder.i32_const(tag as i32);
1308                     builder.store(
1309                         ctx.memory_id,
1310                         walrus::ir::StoreKind::I32 { atomic: false },
1311                         walrus::ir::MemArg { align: 4, offset: 0 },
1312                     );
1313
1314                     match fields {
1315                         crate::ast::EnumVariantFields::Unit => {},
1316                         crate::ast::EnumVariantFields::Tuple(exprs) => {
1317                             let variant_offsets = field_offsets_map.get(variant).expect("Variant offsets missing");
1318                             for (i, expr) in exprs.iter().enumerate() {
1319                                 if let Some(&offset) = variant_offsets.get(&i.to_string()) {
1320                                     builder.global_get(ctx.heap_ptr_global);
1321                                     builder.i32_const(aligned_size as i32);
1322                                     builder.binop(walrus::ir::BinaryOp::I32Sub);
1323                                     builder.i32_const((4 + offset) as i32);
1324                                     builder.binop(walrus::ir::BinaryOp::I32Add);
1325                                     
1326                                     self.compile_expr(ctx, builder, expr)?;
1327                                     self.emit_store_for_expr(ctx, builder, expr, 0); 
1328                                 }
1329                             }
1330                         },
1331                         crate::ast::EnumVariantFields::Struct(named_fields) => {
1332                             let variant_offsets = field_offsets_map.get(variant).expect("Variant offsets missing");
1333                             for (name, expr) in named_fields {
1334                                 if let Some(&offset) = variant_offsets.get(name) {
1335                                     builder.global_get(ctx.heap_ptr_global);
1336                                     builder.i32_const(aligned_size as i32);
1337                                     builder.binop(walrus::ir::BinaryOp::I32Sub);
1338                                     builder.i32_const((4 + offset) as i32);
1339                                     builder.binop(walrus::ir::BinaryOp::I32Add);
1340                                     
1341                                     self.compile_expr(ctx, builder, expr)?;
1342                                     self.emit_store_for_expr(ctx, builder, expr, 0);
1343                                 }
1344                             }
1345                         }
1346                     }
1347
1348                     // Return base pointer
1349                     builder.global_get(ctx.heap_ptr_global);
1350                     builder.i32_const(aligned_size as i32);
1351                     builder.binop(walrus::ir::BinaryOp::I32Sub);
1352                } else {
1353                    return Err(KoreError::codegen(format!("Enum layout not found for {}", enum_name), *span));
1354                }
1355            }
1356            Expr::Struct { name, fields, span } => {
1357                if let Some((field_offsets, total_size)) = ctx.struct_layouts.get(name).cloned() {
1358                    // Allocate memory for struct using bump allocator
1359                    self.emit_alloc(ctx, builder, total_size);
1360                    // Stack: [base_ptr]
1361                    
1362                    // We need to keep base_ptr for field stores AND return it
1363                    // Strategy: for each field, dup the ptr, add offset, store
1364                    // But walrus doesn't have dup... so we emit base_ptr before each store
1365                    
1366                    // Store fields: emit [addr, value] then store
1367                    for (field_name, field_expr) in fields {
1368                        if let Some(&field_offset) = field_offsets.get(field_name) {
1369                            // Emit base_ptr + offset for store address
1370                            builder.global_get(ctx.heap_ptr_global);
1371                            // Need to subtract total_size to get back to our base
1372                            // Actually, heap_ptr now points PAST our allocation
1373                            // Our base = heap_ptr - aligned_size
1374                            // Simpler: re-emit the base calculation
1375                            
1376                            // Get the base we just allocated (heap_ptr - aligned_total_size)
1377                            let aligned_size = (total_size + 7) & !7;
1378                            builder.i32_const(aligned_size as i32);
1379                            builder.binop(walrus::ir::BinaryOp::I32Sub);
1380                            builder.i32_const(field_offset as i32);
1381                            builder.binop(walrus::ir::BinaryOp::I32Add);
1382                            // Stack: [field_addr]
1383                            
1384                            // Compile the field value
1385                            self.compile_expr(ctx, builder, field_expr)?;
1386                            // Stack: [field_addr, value]
1387                            
1388                            // Store (assumes i64 for now)
1389                            builder.store(
1390                                ctx.memory_id,
1391                                walrus::ir::StoreKind::I64 { atomic: false },
1392                                walrus::ir::MemArg { align: 8, offset: 0 },
1393                            );
1394                        }
1395                    }
1396                    
1397                    // Leave struct pointer on stack (base address)
1398                    let aligned_size = (total_size + 7) & !7;
1399                    builder.global_get(ctx.heap_ptr_global);
1400                    builder.i32_const(aligned_size as i32);
1401                    builder.binop(walrus::ir::BinaryOp::I32Sub);
1402                } else {
1403                    return Err(KoreError::codegen(format!("Struct '{}' layout not found", name), *span));
1404                }
1405            }
1406            // Field access: load from struct pointer + offset
1407            Expr::Field { object, field, span: _ } => {
1408                // Compile the object to get struct pointer
1409                self.compile_expr(ctx, builder, object)?;
1410                // Stack: [ptr]
1411                
1412                // Try to find field offset from any struct layout
1413                // This is a heuristic - proper impl would use type info
1414                let mut field_offset = 0u32;
1415                let mut found = false;
1416                for (_struct_name, (offsets, _size)) in ctx.struct_layouts.iter() {
1417                    if let Some(&offset) = offsets.get(field) {
1418                        field_offset = offset;
1419                        found = true;
1420                        break;
1421                    }
1422                }
1423                
1424                if found && field_offset > 0 {
1425                    builder.i32_const(field_offset as i32);
1426                    builder.binop(walrus::ir::BinaryOp::I32Add);
1427                }
1428                
1429                // Load value from memory (default to i64)
1430                builder.load(
1431                    ctx.memory_id,
1432                    walrus::ir::LoadKind::I64 { atomic: false },
1433                    walrus::ir::MemArg { align: 8, offset: 0 },
1434                );
1435            }
1436            // Method call: obj.method(args) desugars to Type.method(obj, args)
1437            Expr::MethodCall { receiver, method, args, span } => {
1438                // Compile the receiver (self)
1439                self.compile_expr(ctx, builder, receiver)?;
1440                
1441                // Compile arguments
1442                for arg in args {
1443                    self.compile_expr(ctx, builder, &arg.value)?;
1444                }
1445                
1446                // Look for method in functions map
1447                // Methods are typically named "TypeName.method_name"
1448                // For now, try just the method name
1449                if let Some(func_id) = ctx.functions.get(method) {
1450                    builder.call(*func_id);
1451                } else {
1452                    // Method not found - leave result on stack as placeholder
1453                    // Real impl would look for impl blocks
1454                    return Err(KoreError::codegen(format!("Method '{}' not found", method), *span));
1455                }
1456            }
1457            // Array literal: allocate memory and store length + elements
1458            Expr::Array(elements, _span) => {
1459                let len = elements.len() as u32;
1460                let element_size = 8u32; // i64 elements
1461                let total_size = 4 + (len * element_size); // 4 bytes for length + elements
1462                let aligned_size = (total_size + 7) & !7;
1463                
1464                // Allocate using bump allocator
1465                self.emit_alloc(ctx, builder, total_size);
1466                // Stack: [base_ptr] - but emit_alloc leaves OLD ptr, heap_ptr is now past us
1467                // Actually emit_alloc returns old heap_ptr which IS our base. Perfect!
1468                
1469                // Drop the base_ptr from stack for now, we'll recompute for stores
1470                builder.drop();
1471                
1472                // Compute base address: heap_ptr - aligned_size
1473                let get_base = |b: &mut InstrSeqBuilder, hp: walrus::GlobalId, sz: u32| {
1474                    b.global_get(hp);
1475                    b.i32_const(sz as i32);
1476                    b.binop(walrus::ir::BinaryOp::I32Sub);
1477                };
1478                
1479                // Store length at base
1480                get_base(builder, ctx.heap_ptr_global, aligned_size);
1481                builder.i32_const(len as i32);
1482                builder.store(
1483                    ctx.memory_id,
1484                    walrus::ir::StoreKind::I32 { atomic: false },
1485                    walrus::ir::MemArg { align: 4, offset: 0 },
1486                );
1487                
1488                // Store each element
1489                for (i, elem) in elements.iter().enumerate() {
1490                    // Address = base + 4 + (i * 8)
1491                    get_base(builder, ctx.heap_ptr_global, aligned_size);
1492                    builder.i32_const((4 + i as u32 * element_size) as i32);
1493                    builder.binop(walrus::ir::BinaryOp::I32Add);
1494                    
1495                    self.compile_expr(ctx, builder, elem)?;
1496                    builder.store(
1497                        ctx.memory_id,
1498                        walrus::ir::StoreKind::I64 { atomic: false },
1499                        walrus::ir::MemArg { align: 8, offset: 0 },
1500                    );
1501                }
1502                
1503                // Leave array pointer on stack
1504                get_base(builder, ctx.heap_ptr_global, aligned_size);
1505            }
1506            // Index access: arr[i] - load from array pointer + 4 + (i * 8)
1507            Expr::Index { object, index, span: _ } => {
1508                // Compile array pointer
1509                self.compile_expr(ctx, builder, object)?;
1510                // Save to compute address: base + 4 + (index * 8)
1511                // Stack: [base_ptr]
1512                
1513                builder.i32_const(4); // Skip length field
1514                builder.binop(walrus::ir::BinaryOp::I32Add);
1515                // Stack: [base_ptr + 4]
1516                
1517                // Compile index
1518                self.compile_expr(ctx, builder, index)?;
1519                // Convert i64 index to i32 for address calculation
1520                builder.unop(walrus::ir::UnaryOp::I32WrapI64);
1521                builder.i32_const(8); // element size
1522                builder.binop(walrus::ir::BinaryOp::I32Mul);
1523                // Stack: [base_ptr + 4, index * 8]
1524                
1525                builder.binop(walrus::ir::BinaryOp::I32Add);
1526                // Stack: [base_ptr + 4 + index * 8]
1527                
1528                // Load i64 element
1529                builder.load(
1530                    ctx.memory_id,
1531                    walrus::ir::LoadKind::I64 { atomic: false },
1532                    walrus::ir::MemArg { align: 8, offset: 0 },
1533                );
1534            }
1535            // Tuple literal: allocate memory and store elements (like struct with indexed fields)
1536            Expr::Tuple(elements, _span) => {
1537                let len = elements.len() as u32;
1538                let element_size = 8u32; // All elements i64 for now
1539                let total_size = len * element_size;
1540                let aligned_size = (total_size + 7) & !7;
1541                
1542                // Allocate
1543                self.emit_alloc(ctx, builder, total_size);
1544                builder.drop(); // We'll recompute base for each store
1545                
1546                // Store each element
1547                for (i, elem) in elements.iter().enumerate() {
1548                    // Address = heap_ptr - aligned_size + (i * 8)
1549                    builder.global_get(ctx.heap_ptr_global);
1550                    builder.i32_const(aligned_size as i32);
1551                    builder.binop(walrus::ir::BinaryOp::I32Sub);
1552                    builder.i32_const((i as u32 * element_size) as i32);
1553                    builder.binop(walrus::ir::BinaryOp::I32Add);
1554                    
1555                    self.compile_expr(ctx, builder, elem)?;
1556                    builder.store(
1557                        ctx.memory_id,
1558                        walrus::ir::StoreKind::I64 { atomic: false },
1559                        walrus::ir::MemArg { align: 8, offset: 0 },
1560                    );
1561                }
1562                
1563                // Leave tuple pointer on stack
1564                builder.global_get(ctx.heap_ptr_global);
1565                builder.i32_const(aligned_size as i32);
1566                builder.binop(walrus::ir::BinaryOp::I32Sub);
1567            }
1568            // Match expression: compile as chained if-else
1569            Expr::Match { scrutinee, arms, span: _ } => {
1570                // Compile scrutinee and store in temp local
1571                self.compile_expr(ctx, builder, scrutinee)?;
1572                builder.local_set(ctx.tmp_i32);
1573                
1574                // Build nested if-else chain for arms
1575                // Each arm: check pattern, if matches execute body
1576                // We'll use a simple approach: each arm is an if/else
1577                
1578                for (i, arm) in arms.iter().enumerate() {
1579                    let is_last = i == arms.len() - 1;
1580                    
1581                    match &arm.pattern {
1582                        crate::ast::Pattern::Wildcard(_) => {
1583                            // Wildcard always matches - just emit the body
1584                            self.compile_expr(ctx, builder, &arm.body)?;
1585                        }
1586                        crate::ast::Pattern::Literal(lit_expr) => {
1587                            // Compare scrutinee with literal
1588                            builder.local_get(ctx.tmp_i32);
1589                            self.compile_expr(ctx, builder, lit_expr)?;
1590                            // Wrap i64 to i32 for comparison if needed
1591                            builder.unop(walrus::ir::UnaryOp::I32WrapI64);
1592                            builder.binop(walrus::ir::BinaryOp::I32Eq);
1593                            
1594                            if is_last {
1595                                // Last arm: just emit body conditionally
1596                                builder.if_else(
1597                                    None,
1598                                    |then_b| { let _ = self.compile_expr(ctx, then_b, &arm.body); },
1599                                    |_else_b| {}
1600                                );
1601                            } else {
1602                                builder.if_else(
1603                                    None,
1604                                    |then_b| { let _ = self.compile_expr(ctx, then_b, &arm.body); },
1605                                    |_else_b| {
1606                                        // Continue to next arm - but we can't recurse easily here
1607                                        // For now, just leave empty - full impl needs restructuring
1608                                    }
1609                                );
1610                            }
1611                        }
1612                        crate::ast::Pattern::Binding { name, .. } => {
1613                            // Binding: bind scrutinee to local and execute body
1614                            if let Some(local_id) = ctx.locals.get(name) {
1615                                builder.local_get(ctx.tmp_i32);
1616                                builder.unop(walrus::ir::UnaryOp::I64ExtendSI32); // Convert back to i64
1617                                builder.local_set(*local_id);
1618                            }
1619                            self.compile_expr(ctx, builder, &arm.body)?;
1620                        }
1621                        crate::ast::Pattern::Variant { variant, .. } => {
1622                            // For enum patterns: load tag, compare with variant tag
1623                            // Load tag from scrutinee pointer
1624                            builder.local_get(ctx.tmp_i32);
1625                            builder.load(
1626                                ctx.memory_id,
1627                                walrus::ir::LoadKind::I32 { atomic: false },
1628                                walrus::ir::MemArg { align: 4, offset: 0 },
1629                            );
1630                            
1631                            // TODO: look up variant tag from enum_layouts
1632                            // For now just use the variant name hash as placeholder
1633                            let tag = variant.len() as i32 % 256; // Placeholder
1634                            builder.i32_const(tag);
1635                            builder.binop(walrus::ir::BinaryOp::I32Eq);
1636                            
1637                            builder.if_else(
1638                                None,
1639                                |then_b| { let _ = self.compile_expr(ctx, then_b, &arm.body); },
1640                                |_else_b| {}
1641                            );
1642                        }
1643                        _ => {
1644                            // Other patterns: just emit body (fallback)
1645                            self.compile_expr(ctx, builder, &arm.body)?;
1646                        }
1647                    }
1648                }
1649            }
1650            // MacroCall: handle println!, print!, dbg!
1651            Expr::MacroCall { name, args, span: _ } => {
1652                match name.as_str() {
1653                    "println" | "print" => {
1654                        // For each argument, determine type and call appropriate print function
1655                        for arg in args {
1656                            match arg {
1657                                Expr::Int(_, _) => {
1658                                    self.compile_expr(ctx, builder, arg)?;
1659                                    if let Some(func_id) = ctx.functions.get("print_i64") {
1660                                        builder.call(*func_id);
1661                                    }
1662                                }
1663                                Expr::Float(_, _) => {
1664                                    self.compile_expr(ctx, builder, arg)?;
1665                                    if let Some(func_id) = ctx.functions.get("print_f64") {
1666                                        builder.call(*func_id);
1667                                    }
1668                                }
1669                                Expr::Bool(_, _) => {
1670                                    self.compile_expr(ctx, builder, arg)?;
1671                                    if let Some(func_id) = ctx.functions.get("print_bool") {
1672                                        builder.call(*func_id);
1673                                    }
1674                                }
1675                                Expr::String(s, _) => {
1676                                    // For strings, we need ptr and len
1677                                    if let Some(&offset) = ctx.string_table.get(s) {
1678                                        // Push pointer (offset + 4 to skip length prefix)
1679                                        builder.i32_const((offset + 4) as i32);
1680                                        // Push length
1681                                        builder.i32_const(s.len() as i32);
1682                                        if let Some(func_id) = ctx.functions.get("print_str") {
1683                                            builder.call(*func_id);
1684                                        }
1685                                    }
1686                                }
1687                                Expr::Ident(_, _) => {
1688                                    // For variables, compile and assume i64 for now
1689                                    self.compile_expr(ctx, builder, arg)?;
1690                                    if let Some(func_id) = ctx.functions.get("print_i64") {
1691                                        builder.call(*func_id);
1692                                    }
1693                                }
1694                                _ => {
1695                                    // Default: compile and print as i64
1696                                    self.compile_expr(ctx, builder, arg)?;
1697                                    if let Some(func_id) = ctx.functions.get("print_i64") {
1698                                        builder.call(*func_id);
1699                                    }
1700                                }
1701                            }
1702                        }
1703                        // Push a dummy value since expressions need to produce something
1704                        builder.i64_const(0);
1705                    }
1706                    "dbg" => {
1707                        // Debug: print and return the value
1708                        if let Some(arg) = args.first() {
1709                            self.compile_expr(ctx, builder, arg)?;
1710                            // Duplicate for print and return
1711                            // Actually can't dup easily, so just print
1712                            if let Some(func_id) = ctx.functions.get("print_i64") {
1713                                builder.call(*func_id);
1714                            }
1715                        }
1716                        builder.i64_const(0);
1717                    }
1718                    _ => {
1719                        // Unknown macro - just push 0
1720                        builder.i64_const(0);
1721                    }
1722                }
1723            }
1724            // Range expression: for now just push start value since ranges are handled inline in for loops
1725            Expr::Range { start, end: _, inclusive: _, span: _ } => {
1726                // Ranges are typically used inline in for loops
1727                // If used standalone, just return the start value
1728                if let Some(start_expr) = start {
1729                    self.compile_expr(ctx, builder, start_expr)?;
1730                } else {
1731                    builder.i64_const(0);
1732                }
1733            }
1734            // Lambda expression: return table index for the pre-compiled lambda function
1735            Expr::Lambda { params, return_type: _, body: _, span: _ } => {
1736                // Lambdas are compiled in pre-pass and stored in lambda_table
1737                // Find the lambda by matching parameter count (simplified - proper impl would use unique IDs)
1738                // For now, we need to track which lambda this is
1739                // 
1740                // Since lambdas are assigned IDs in order during collection,
1741                // we need to find which ID this lambda has
1742                // This is a limitation - proper impl would tag each lambda AST with an ID
1743                //
1744                // For now, push the table index based on param count heuristic
1745                // This works if lambdas are unique by param count
1746                let _param_count = params.len() as u32;
1747                
1748                // Search lambda_table for a lambda with matching param count
1749                let mut found_index = 0i32;
1750                for (_id, (table_idx, _func_id)) in ctx.lambda_table.iter() {
1751                    // Simple heuristic: use first lambda if param counts can't be matched
1752                    found_index = *table_idx as i32;
1753                    break; // TODO: proper ID tracking
1754                }
1755                
1756                // Push table index as i32 (for call_indirect)
1757                builder.i32_const(found_index);
1758            }
1759            // Block expression: compile all statements, return last expression value
1760            Expr::Block(block, _span) => {
1761                // Compile all statements except the last
1762                for (i, stmt) in block.stmts.iter().enumerate() {
1763                    if i < block.stmts.len() - 1 {
1764                        self.compile_stmt(ctx, builder, stmt)?;
1765                    } else {
1766                        // Last statement - if it's an expression, keep its value
1767                        if let Stmt::Expr(expr) = stmt {
1768                            self.compile_expr(ctx, builder, expr)?;
1769                        } else {
1770                            self.compile_stmt(ctx, builder, stmt)?;
1771                            builder.i64_const(0); // Block returns unit
1772                        }
1773                    }
1774                }
1775                if block.stmts.is_empty() {
1776                    builder.i64_const(0);
1777                }
1778            }
1779            _ => {}
1780        }
1781        Ok(())
1782    }
1783
1784    fn compile_else_branch(&self, ctx: &CompilationContext, builder: &mut InstrSeqBuilder, branch: &crate::ast::ElseBranch) -> KoreResult<()> {
1785        match branch {
1786            crate::ast::ElseBranch::Else(block) => {
1787                let _ = self.compile_block(ctx, builder, block);
1788            }
1789            crate::ast::ElseBranch::ElseIf(cond, then, next_else) => {
1790                self.compile_expr(ctx, builder, cond)?;
1791                
1792                builder.if_else(
1793                    None, 
1794                    |then_builder| {
1795                        let _ = self.compile_block(ctx, then_builder, then);
1796                    },
1797                    |else_builder| {
1798                        if let Some(next) = next_else {
1799                            let _ = self.compile_else_branch(ctx, else_builder, next);
1800                        }
1801                    }
1802                );
1803            }
1804        }
1805        Ok(())
1806    }
1807
1808    fn compile_jsx_node(&self, ctx: &CompilationContext, builder: &mut InstrSeqBuilder, node: &crate::ast::JSXNode) -> KoreResult<()> {
1809        match node {
1810            crate::ast::JSXNode::Element { tag, attributes, children, .. } => {
1811                // 1. Compile Children
1812                for child in children {
1813                    self.compile_jsx_node(ctx, builder, child)?;
1814                }
1815                
1816                // 2. Allocate Children Array
1817                let child_count = children.len() as u32;
1818                let children_size = 4 + (child_count * 4);
1819                self.emit_alloc(ctx, builder, children_size);
1820                builder.local_set(ctx.tmp_i32); // Save array ptr
1821                
1822                // Store children (Reverse order because they are on stack)
1823                for i in (0..child_count).rev() {
1824                    // Stack: [.., child_val]
1825                    builder.local_set(ctx.tmp_i32_2); // Pop child val
1826                    
1827                    // Addr = base + 4 + i*4
1828                    builder.local_get(ctx.tmp_i32);
1829                    builder.i32_const((4 + i * 4) as i32);
1830                    builder.binop(walrus::ir::BinaryOp::I32Add);
1831                    
1832                    builder.local_get(ctx.tmp_i32_2); // Val
1833                    
1834                    builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1835                }
1836                
1837                // Store length
1838                builder.local_get(ctx.tmp_i32);
1839                builder.i32_const(child_count as i32);
1840                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1841                
1842                // Keep Children Array Ptr on stack (Wait, we stored it in tmp_i32, but we need to push it back)
1843                // BUT we have Props to compile. If Props use tmp_i32, we lose it.
1844                // We MUST push it to stack now.
1845                builder.local_get(ctx.tmp_i32);
1846                // Stack: [children_ptr]
1847                
1848                // 3. Compile Props
1849                let props_count = attributes.len() as u32;
1850                for attr in attributes {
1851                     // Key
1852                     if let Some(&offset) = ctx.string_table.get(&attr.name) {
1853                         builder.i32_const((offset + 4) as i32);
1854                     } else {
1855                         builder.i32_const(0);
1856                     }
1857                     
1858                     // Value
1859                     match &attr.value {
1860                         crate::ast::JSXAttrValue::String(s) => {
1861                             if let Some(&offset) = ctx.string_table.get(s) {
1862                                 builder.i32_const((offset + 4) as i32);
1863                             } else {
1864                                 builder.i32_const(0);
1865                             }
1866                             builder.unop(walrus::ir::UnaryOp::I64ExtendUI32);
1867                         },
1868                         crate::ast::JSXAttrValue::Expr(e) => {
1869                             self.compile_expr(ctx, builder, e)?;
1870                         },
1871                         crate::ast::JSXAttrValue::Bool(b) => {
1872                             builder.i64_const(if *b { 1 } else { 0 });
1873                         }
1874                     }
1875                }
1876                
1877                // Allocate Props Array
1878                let props_item_size = 12;
1879                let props_size = 4 + (props_count * props_item_size);
1880                self.emit_alloc(ctx, builder, props_size);
1881                builder.local_set(ctx.tmp_i32); // Save props array ptr
1882                
1883                // Store Props (Reverse)
1884                for i in (0..props_count).rev() {
1885                    builder.local_set(ctx.tmp_i64); // Pop val (i64)
1886                    builder.local_set(ctx.tmp_i32_2); // Pop key (i32)
1887                    
1888                    // Store Key
1889                    builder.local_get(ctx.tmp_i32);
1890                    builder.i32_const((4 + i * props_item_size) as i32);
1891                    builder.binop(walrus::ir::BinaryOp::I32Add);
1892                    builder.local_get(ctx.tmp_i32_2);
1893                    builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1894
1895                    // Store Val
1896                    builder.local_get(ctx.tmp_i32);
1897                    builder.i32_const((4 + i * props_item_size + 4) as i32);
1898                    builder.binop(walrus::ir::BinaryOp::I32Add);
1899                    builder.local_get(ctx.tmp_i64);
1900                    builder.store(ctx.memory_id, walrus::ir::StoreKind::I64 { atomic: false }, walrus::ir::MemArg { align: 8, offset: 0 });
1901                }
1902                
1903                // Store Props Length
1904                builder.local_get(ctx.tmp_i32);
1905                builder.i32_const(props_count as i32);
1906                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1907                
1908                // Push Props Ptr
1909                builder.local_get(ctx.tmp_i32);
1910                
1911                // Stack: [children_ptr, props_ptr]
1912                
1913                // 4. Allocate VNode (16 bytes)
1914                self.emit_alloc(ctx, builder, 16);
1915                builder.local_set(ctx.tmp_i32); // VNode Ptr
1916                
1917                // Store Props Ptr (offset 8)
1918                // Stack: [children_ptr, props_ptr]
1919                builder.local_set(ctx.tmp_i32_2); // props_ptr
1920                
1921                builder.local_get(ctx.tmp_i32);
1922                builder.i32_const(8);
1923                builder.binop(walrus::ir::BinaryOp::I32Add);
1924                builder.local_get(ctx.tmp_i32_2);
1925                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1926                
1927                // Store Children Ptr (offset 12)
1928                // Stack: [children_ptr]
1929                builder.local_set(ctx.tmp_i32_2); // children_ptr
1930                
1931                builder.local_get(ctx.tmp_i32);
1932                builder.i32_const(12);
1933                builder.binop(walrus::ir::BinaryOp::I32Add);
1934                builder.local_get(ctx.tmp_i32_2);
1935                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1936                
1937                // Store Type = 1 (Element) (offset 0)
1938                builder.local_get(ctx.tmp_i32);
1939                builder.i32_const(1);
1940                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1941                
1942                // Store Tag (offset 4)
1943                let tag_ptr = if let Some(&offset) = ctx.string_table.get(tag) { offset + 4 } else { 0 };
1944                builder.local_get(ctx.tmp_i32);
1945                builder.i32_const(tag_ptr as i32);
1946                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 4 });
1947                
1948                // Return VNode Ptr
1949                builder.local_get(ctx.tmp_i32);
1950            }
1951            crate::ast::JSXNode::Text(s, _) => {
1952                self.emit_alloc(ctx, builder, 16);
1953                builder.local_set(ctx.tmp_i32);
1954                
1955                builder.local_get(ctx.tmp_i32);
1956                builder.i32_const(0); // Type = 0 (Text)
1957                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 0 });
1958                
1959                let text_ptr = if let Some(&offset) = ctx.string_table.get(s) { offset + 4 } else { 0 };
1960                builder.local_get(ctx.tmp_i32);
1961                builder.i32_const(text_ptr as i32);
1962                builder.store(ctx.memory_id, walrus::ir::StoreKind::I32 { atomic: false }, walrus::ir::MemArg { align: 4, offset: 12 }); // Store in text field (offset 12)
1963                
1964                builder.local_get(ctx.tmp_i32);
1965            }
1966            crate::ast::JSXNode::Expression(e) => {
1967                 self.compile_expr(ctx, builder, e)?;
1968                 builder.unop(walrus::ir::UnaryOp::I32WrapI64);
1969            }
1970            _ => {
1971                 builder.i32_const(0);
1972            }
1973        }
1974        Ok(())
1975    }
1976
1977    fn emit_store_for_expr(&self, ctx: &CompilationContext, builder: &mut InstrSeqBuilder, expr: &Expr, offset: u32) {
1978        match expr {
1979            Expr::Int(_, _) => {
1980                builder.store(
1981                    ctx.memory_id,
1982                    walrus::ir::StoreKind::I64 { atomic: false },
1983                    walrus::ir::MemArg { align: 8, offset },
1984                );
1985            }
1986            Expr::Float(_, _) => {
1987                builder.store(
1988                    ctx.memory_id,
1989                    walrus::ir::StoreKind::F64,
1990                    walrus::ir::MemArg { align: 8, offset },
1991                );
1992            }
1993            Expr::Bool(_, _) | Expr::String(_, _) => {
1994                builder.store(
1995                    ctx.memory_id,
1996                    walrus::ir::StoreKind::I32 { atomic: false },
1997                    walrus::ir::MemArg { align: 4, offset },
1998                );
1999            }
2000            _ => {
2001                // Default to I64 (pointers, arrays, structs, etc)
2002                builder.store(
2003                    ctx.memory_id,
2004                    walrus::ir::StoreKind::I64 { atomic: false },
2005                    walrus::ir::MemArg { align: 8, offset },
2006                );
2007            }
2008        }
2009    }
2010}
2011