// oxilean_codegen/native_backend/types.rs

1//! Auto-generated module
2//!
3//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
4
5use crate::lcnf::*;
6use std::collections::{HashMap, HashSet};
7
8use super::functions::*;
9use std::collections::VecDeque;
10
/// A unique identifier for a basic block.
///
/// Ids are dense `u32`s handed out in increasing order by
/// `NativeBackend::alloc_block`, so the derived `Ord` reflects creation order.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct BlockId(pub u32);
/// Per-basic-block liveness sets, indexed by block number.
///
/// Holds the classic dataflow sets (`live_in`, `live_out`, `defs`, `uses`);
/// this type only stores and queries them — the fixpoint computation that
/// fills `live_in`/`live_out` lives elsewhere.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct NatLivenessInfo {
    /// Variables live on entry to each block.
    pub live_in: Vec<HashSet<u32>>,
    /// Variables live on exit from each block.
    pub live_out: Vec<HashSet<u32>>,
    /// Variables defined (written) in each block.
    pub defs: Vec<HashSet<u32>>,
    /// Variables used (read) in each block.
    pub uses: Vec<HashSet<u32>>,
}
impl NatLivenessInfo {
    /// Create liveness info with empty sets for `block_count` blocks.
    #[allow(dead_code)]
    pub fn new(block_count: usize) -> Self {
        // `HashSet` is imported at the top of the file; no need for the
        // fully-qualified `std::collections::` path used previously.
        NatLivenessInfo {
            live_in: vec![HashSet::new(); block_count],
            live_out: vec![HashSet::new(); block_count],
            defs: vec![HashSet::new(); block_count],
            uses: vec![HashSet::new(); block_count],
        }
    }
    /// Record a definition of `var` in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_def(&mut self, block: usize, var: u32) {
        if block < self.defs.len() {
            self.defs[block].insert(var);
        }
    }
    /// Record a use of `var` in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_use(&mut self, block: usize, var: u32) {
        if block < self.uses.len() {
            self.uses[block].insert(var);
        }
    }
    /// Whether `var` is live on entry to `block` (`false` if out of range).
    #[allow(dead_code)]
    pub fn is_live_in(&self, block: usize, var: u32) -> bool {
        self.live_in
            .get(block)
            .map(|s| s.contains(&var))
            .unwrap_or(false)
    }
    /// Whether `var` is live on exit from `block` (`false` if out of range).
    #[allow(dead_code)]
    pub fn is_live_out(&self, block: usize, var: u32) -> bool {
        self.live_out
            .get(block)
            .map(|s| s.contains(&var))
            .unwrap_or(false)
    }
}
/// Constant-folding helpers for native IR operands.
///
/// Integer helpers return `None` when the operation would overflow or divide
/// by zero, so callers can skip the fold instead of miscompiling. Float and
/// bitwise helpers are total and return the value directly.
#[allow(dead_code)]
pub struct NatConstantFoldingHelper;
impl NatConstantFoldingHelper {
    /// `a + b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_add_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_add(b)
    }
    /// `a - b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_sub_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_sub(b)
    }
    /// `a * b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_mul_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_mul(b)
    }
    /// `a / b`, or `None` on division by zero or overflow (`i64::MIN / -1`).
    #[allow(dead_code)]
    pub fn fold_div_i64(a: i64, b: i64) -> Option<i64> {
        // checked_div already yields None for b == 0, so the explicit
        // zero test the previous version carried was redundant.
        a.checked_div(b)
    }
    /// `a + b` (IEEE-754; never fails, may produce inf/NaN).
    #[allow(dead_code)]
    pub fn fold_add_f64(a: f64, b: f64) -> f64 {
        a + b
    }
    /// `a * b` (IEEE-754; never fails, may produce inf/NaN).
    #[allow(dead_code)]
    pub fn fold_mul_f64(a: f64, b: f64) -> f64 {
        a * b
    }
    /// `-a`, or `None` on overflow (`-i64::MIN`).
    #[allow(dead_code)]
    pub fn fold_neg_i64(a: i64) -> Option<i64> {
        a.checked_neg()
    }
    /// Logical NOT.
    #[allow(dead_code)]
    pub fn fold_not_bool(a: bool) -> bool {
        !a
    }
    /// Logical AND.
    #[allow(dead_code)]
    pub fn fold_and_bool(a: bool, b: bool) -> bool {
        a && b
    }
    /// Logical OR.
    #[allow(dead_code)]
    pub fn fold_or_bool(a: bool, b: bool) -> bool {
        a || b
    }
    /// `a << b`, or `None` if `b >= 64`.
    #[allow(dead_code)]
    pub fn fold_shl_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shl(b)
    }
    /// Arithmetic `a >> b`, or `None` if `b >= 64`.
    #[allow(dead_code)]
    pub fn fold_shr_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shr(b)
    }
    /// `a % b`, or `None` on division by zero or overflow.
    #[allow(dead_code)]
    pub fn fold_rem_i64(a: i64, b: i64) -> Option<i64> {
        // BUG FIX: the previous `Some(a % b)` overflowed on
        // (i64::MIN, -1), panicking in debug builds; checked_rem
        // covers both the zero divisor and the overflow case.
        a.checked_rem(b)
    }
    /// Bitwise AND.
    #[allow(dead_code)]
    pub fn fold_bitand_i64(a: i64, b: i64) -> i64 {
        a & b
    }
    /// Bitwise OR.
    #[allow(dead_code)]
    pub fn fold_bitor_i64(a: i64, b: i64) -> i64 {
        a | b
    }
    /// Bitwise XOR.
    #[allow(dead_code)]
    pub fn fold_bitxor_i64(a: i64, b: i64) -> i64 {
        a ^ b
    }
    /// Bitwise NOT.
    #[allow(dead_code)]
    pub fn fold_bitnot_i64(a: i64) -> i64 {
        !a
    }
}
139/// A compiled native module.
140#[derive(Debug, Clone)]
141pub struct NativeModule {
142    /// Functions in this module.
143    pub functions: Vec<NativeFunc>,
144    /// Global variables.
145    pub globals: Vec<(String, NativeType, Option<i64>)>,
146    /// External function declarations.
147    pub extern_fns: Vec<(String, Vec<NativeType>, NativeType)>,
148    /// Module name.
149    pub name: String,
150}
151impl NativeModule {
152    /// Create a new empty module.
153    pub(super) fn new(name: &str) -> Self {
154        NativeModule {
155            functions: Vec::new(),
156            globals: Vec::new(),
157            extern_fns: Vec::new(),
158            name: name.to_string(),
159        }
160    }
161    /// Total number of instructions in the module.
162    pub fn total_instructions(&self) -> usize {
163        self.functions.iter().map(|f| f.instruction_count()).sum()
164    }
165    /// Find a function by name.
166    pub fn get_function(&self, name: &str) -> Option<&NativeFunc> {
167        self.functions.iter().find(|f| f.name == name)
168    }
169}
170#[allow(dead_code)]
171#[derive(Debug, Clone)]
172pub struct NatDepGraph {
173    pub(super) nodes: Vec<u32>,
174    pub(super) edges: Vec<(u32, u32)>,
175}
176impl NatDepGraph {
177    #[allow(dead_code)]
178    pub fn new() -> Self {
179        NatDepGraph {
180            nodes: Vec::new(),
181            edges: Vec::new(),
182        }
183    }
184    #[allow(dead_code)]
185    pub fn add_node(&mut self, id: u32) {
186        if !self.nodes.contains(&id) {
187            self.nodes.push(id);
188        }
189    }
190    #[allow(dead_code)]
191    pub fn add_dep(&mut self, dep: u32, dependent: u32) {
192        self.add_node(dep);
193        self.add_node(dependent);
194        self.edges.push((dep, dependent));
195    }
196    #[allow(dead_code)]
197    pub fn dependents_of(&self, node: u32) -> Vec<u32> {
198        self.edges
199            .iter()
200            .filter(|(d, _)| *d == node)
201            .map(|(_, dep)| *dep)
202            .collect()
203    }
204    #[allow(dead_code)]
205    pub fn dependencies_of(&self, node: u32) -> Vec<u32> {
206        self.edges
207            .iter()
208            .filter(|(_, dep)| *dep == node)
209            .map(|(d, _)| *d)
210            .collect()
211    }
212    #[allow(dead_code)]
213    pub fn topological_sort(&self) -> Vec<u32> {
214        let mut in_degree: std::collections::HashMap<u32, u32> = std::collections::HashMap::new();
215        for &n in &self.nodes {
216            in_degree.insert(n, 0);
217        }
218        for (_, dep) in &self.edges {
219            *in_degree.entry(*dep).or_insert(0) += 1;
220        }
221        let mut queue: std::collections::VecDeque<u32> = self
222            .nodes
223            .iter()
224            .filter(|&&n| in_degree[&n] == 0)
225            .copied()
226            .collect();
227        let mut result = Vec::new();
228        while let Some(node) = queue.pop_front() {
229            result.push(node);
230            for dep in self.dependents_of(node) {
231                let cnt = in_degree.entry(dep).or_insert(0);
232                *cnt = cnt.saturating_sub(1);
233                if *cnt == 0 {
234                    queue.push_back(dep);
235                }
236            }
237        }
238        result
239    }
240    #[allow(dead_code)]
241    pub fn has_cycle(&self) -> bool {
242        self.topological_sort().len() < self.nodes.len()
243    }
244}
/// Dominator-tree data for a CFG of `size` nodes.
///
/// `idom[n]` is the immediate dominator of node `n` (a root conventionally
/// points at itself), `dom_children` the inverse relation, and `dom_depth`
/// each node's depth in the tree.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct NatDominatorTree {
    pub idom: Vec<Option<u32>>,
    pub dom_children: Vec<Vec<u32>>,
    pub dom_depth: Vec<u32>,
}
impl NatDominatorTree {
    /// Build an empty tree for `size` nodes; no idoms are assigned yet.
    #[allow(dead_code)]
    pub fn new(size: usize) -> Self {
        Self {
            idom: vec![None; size],
            dom_children: vec![Vec::new(); size],
            dom_depth: vec![0; size],
        }
    }
    /// Record the immediate dominator of `node`. Panics if `node` is out of range.
    #[allow(dead_code)]
    pub fn set_idom(&mut self, node: usize, idom: u32) {
        self.idom[node] = Some(idom);
    }
    /// Does `a` dominate `b`? Walks `b`'s idom chain toward the root.
    #[allow(dead_code)]
    pub fn dominates(&self, a: usize, b: usize) -> bool {
        // Every node dominates itself.
        if a == b {
            return true;
        }
        let mut cur = b;
        while let Some(parent) = self.idom[cur] {
            let parent = parent as usize;
            if parent == a {
                return true;
            }
            if parent == cur {
                // Self-idom marks the root: chain exhausted without meeting `a`.
                return false;
            }
            cur = parent;
        }
        // A node with no idom assigned has no dominators besides itself.
        false
    }
    /// Depth of `node` in the dominator tree; 0 for out-of-range nodes.
    #[allow(dead_code)]
    pub fn depth(&self, node: usize) -> u32 {
        match self.dom_depth.get(node) {
            Some(&d) => d,
            None => 0,
        }
    }
}
/// Configuration for the native backend.
///
/// NOTE(review): `NativeBackend::default_backend` calls
/// `NativeEmitConfig::default()`, so a `Default` impl for this type is
/// expected to exist elsewhere in the module — confirm before relying on it.
#[derive(Debug, Clone)]
pub struct NativeEmitConfig {
    /// Optimization level: 0 = none, 1 = basic, 2 = full, 3 = aggressive.
    pub opt_level: u8,
    /// Whether to generate debug info.
    pub debug_info: bool,
    /// Target architecture hint.
    pub target_arch: String,
    /// Number of general-purpose registers available.
    pub num_gp_regs: usize,
    /// Whether to emit comments in the IR (e.g. function headers, string notes).
    pub emit_comments: bool,
}
299/// A register in the native IR (SSA form).
300///
301/// Registers numbered 0..65535 are virtual (pre-allocation).
302/// Registers >= 65536 are physical (post-allocation).
303#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
304pub struct Register(pub u32);
305
306/// The boundary between virtual and physical register indices.
307pub(super) const VIRT_PHYS_BOUNDARY: u32 = 65536;
308
309impl Register {
310    /// Whether this is a virtual register (pre-allocation).
311    pub fn is_virtual(&self) -> bool {
312        self.0 < VIRT_PHYS_BOUNDARY
313    }
314    /// Whether this is a physical register (post-allocation).
315    pub fn is_physical(&self) -> bool {
316        self.0 >= VIRT_PHYS_BOUNDARY
317    }
318    /// Create a virtual register.
319    pub fn virt(n: u32) -> Self {
320        assert!(
321            n < VIRT_PHYS_BOUNDARY,
322            "Virtual register index must be < {}",
323            VIRT_PHYS_BOUNDARY
324        );
325        Register(n)
326    }
327    /// Create a physical register.
328    pub fn phys(n: u32) -> Self {
329        Register(n + VIRT_PHYS_BOUNDARY)
330    }
331}
/// Accumulated statistics for an optimization pass.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct NatPassStats {
    pub total_runs: u32,
    pub successful_runs: u32,
    pub total_changes: u64,
    pub time_ms: u64,
    pub iterations_used: u32,
}
impl NatPassStats {
    /// Fresh, zeroed statistics (equivalent to `Default`).
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Fold one (successful) pass run into the accumulated counters.
    /// `iterations` overwrites rather than accumulates: it records the
    /// iteration count of the most recent run.
    #[allow(dead_code)]
    pub fn record_run(&mut self, changes: u64, time_ms: u64, iterations: u32) {
        self.iterations_used = iterations;
        self.total_changes += changes;
        self.time_ms += time_ms;
        self.total_runs += 1;
        self.successful_runs += 1;
    }
    /// Mean number of changes per run; 0.0 before any run.
    #[allow(dead_code)]
    pub fn average_changes_per_run(&self) -> f64 {
        match self.total_runs {
            0 => 0.0,
            n => self.total_changes as f64 / f64::from(n),
        }
    }
    /// Fraction of runs that succeeded; 0.0 before any run.
    #[allow(dead_code)]
    pub fn success_rate(&self) -> f64 {
        match self.total_runs {
            0 => 0.0,
            n => f64::from(self.successful_runs) / f64::from(n),
        }
    }
    /// One-line human-readable summary of the counters.
    #[allow(dead_code)]
    pub fn format_summary(&self) -> String {
        format!(
            "Runs: {}/{}, Changes: {}, Time: {}ms",
            self.successful_runs, self.total_runs, self.total_changes, self.time_ms
        )
    }
}
/// The native code generation backend.
///
/// Compiles LCNF modules to native IR suitable for JIT or AOT compilation.
pub struct NativeBackend {
    /// Emission options (opt level, comment emission, target hints).
    pub(super) config: NativeEmitConfig,
    /// Accumulated counters; NOT reset between functions (see
    /// `reset_function_state`, which leaves `stats` untouched).
    pub(super) stats: NativeEmitStats,
    /// Next virtual register to allocate.
    pub(super) next_vreg: u32,
    /// Next block ID to allocate.
    pub(super) next_block: u32,
    /// Mapping from LCNF variable IDs to native registers.
    pub(super) var_map: HashMap<LcnfVarId, Register>,
    /// Next stack slot.
    pub(super) next_stack_slot: u32,
}
391impl NativeBackend {
392    /// Create a new native backend.
393    pub fn new(config: NativeEmitConfig) -> Self {
394        NativeBackend {
395            config,
396            stats: NativeEmitStats::default(),
397            next_vreg: 0,
398            next_block: 0,
399            var_map: HashMap::new(),
400            next_stack_slot: 0,
401        }
402    }
403    /// Create a native backend with default configuration.
404    pub fn default_backend() -> Self {
405        Self::new(NativeEmitConfig::default())
406    }
407    /// Allocate a fresh virtual register.
408    pub(super) fn alloc_vreg(&mut self) -> Register {
409        let r = Register::virt(self.next_vreg);
410        self.next_vreg += 1;
411        self.stats.virtual_regs_used += 1;
412        r
413    }
414    /// Allocate a fresh block ID.
415    pub(super) fn alloc_block(&mut self) -> BlockId {
416        let id = BlockId(self.next_block);
417        self.next_block += 1;
418        self.stats.blocks_generated += 1;
419        id
420    }
421    /// Allocate a stack slot.
422    pub(super) fn alloc_stack_slot(&mut self) -> u32 {
423        let slot = self.next_stack_slot;
424        self.next_stack_slot += 1;
425        self.stats.stack_slots_allocated += 1;
426        slot
427    }
428    /// Reset per-function state.
429    pub(super) fn reset_function_state(&mut self) {
430        self.next_vreg = 0;
431        self.next_block = 0;
432        self.var_map.clear();
433        self.next_stack_slot = 0;
434    }
435    /// Get or allocate a register for an LCNF variable.
436    pub(super) fn get_var_reg(&mut self, id: LcnfVarId) -> Register {
437        if let Some(r) = self.var_map.get(&id) {
438            *r
439        } else {
440            let r = self.alloc_vreg();
441            self.var_map.insert(id, r);
442            r
443        }
444    }
445    /// Compile an entire LCNF module to native IR.
446    pub fn compile_module(&mut self, module: &LcnfModule) -> NativeModule {
447        let mut native_module = NativeModule::new(&module.name);
448        for ext in &module.extern_decls {
449            let params: Vec<NativeType> = ext
450                .params
451                .iter()
452                .filter(|p| !p.erased)
453                .map(|p| lcnf_type_to_native(&p.ty))
454                .collect();
455            let ret = lcnf_type_to_native(&ext.ret_type);
456            native_module
457                .extern_fns
458                .push((ext.name.clone(), params, ret));
459        }
460        for fun in &module.fun_decls {
461            let native_func = self.compile_fun_decl(fun);
462            native_module.functions.push(native_func);
463            self.stats.functions_compiled += 1;
464        }
465        native_module
466    }
467    /// Compile a single LCNF function declaration.
468    pub fn compile_fun_decl(&mut self, decl: &LcnfFunDecl) -> NativeFunc {
469        self.reset_function_state();
470        let mut params = Vec::new();
471        for p in &decl.params {
472            if p.erased {
473                continue;
474            }
475            let r = self.alloc_vreg();
476            self.var_map.insert(p.id, r);
477            params.push((r, lcnf_type_to_native(&p.ty)));
478        }
479        let ret_type = lcnf_type_to_native(&decl.ret_type);
480        let mut func = NativeFunc::new(&decl.name, params, ret_type);
481        func.is_recursive = decl.is_recursive;
482        let entry_id = self.alloc_block();
483        let mut entry_block = BasicBlock::new(entry_id);
484        if self.config.emit_comments {
485            entry_block.push_inst(NativeInst::Comment(format!("Function: {}", decl.name)));
486        }
487        let mut extra_blocks = Vec::new();
488        self.compile_expr(&decl.body, &mut entry_block, &mut extra_blocks, &ret_type);
489        func.blocks.push(entry_block);
490        func.blocks.extend(extra_blocks);
491        func.stack_size = self.next_stack_slot as usize * 8;
492        func
493    }
494    /// Compile an LCNF expression, appending instructions to the current block.
495    /// May create additional basic blocks for control flow.
496    pub(super) fn compile_expr(
497        &mut self,
498        expr: &LcnfExpr,
499        current_block: &mut BasicBlock,
500        extra_blocks: &mut Vec<BasicBlock>,
501        ret_type: &NativeType,
502    ) {
503        match expr {
504            LcnfExpr::Let {
505                id,
506                ty,
507                value,
508                body,
509                ..
510            } => {
511                let native_ty = lcnf_type_to_native(ty);
512                let dst = self.alloc_vreg();
513                self.var_map.insert(*id, dst);
514                self.compile_let_value(value, dst, &native_ty, current_block);
515                self.compile_expr(body, current_block, extra_blocks, ret_type);
516            }
517            LcnfExpr::Case {
518                scrutinee,
519                alts,
520                default,
521                ..
522            } => {
523                let scrut_reg = self.get_var_reg(*scrutinee);
524                if alts.is_empty() {
525                    if let Some(def) = default {
526                        self.compile_expr(def, current_block, extra_blocks, ret_type);
527                    } else {
528                        current_block.push_inst(NativeInst::Ret { value: None });
529                    }
530                    return;
531                }
532                let merge_id = self.alloc_block();
533                let merge_result_reg = self.alloc_vreg();
534                let mut merge_block = BasicBlock::new(merge_id);
535                if *ret_type != NativeType::Void {
536                    merge_block.params.push((merge_result_reg, *ret_type));
537                }
538                if alts.len() == 1 && default.is_some() {
539                    let alt = &alts[0];
540                    let tag_reg = self.alloc_vreg();
541                    current_block.push_inst(NativeInst::Call {
542                        dst: Some(tag_reg),
543                        func: NativeValue::FRef("lean_obj_tag".to_string()),
544                        args: vec![NativeValue::Reg(scrut_reg)],
545                        ret_type: NativeType::I32,
546                    });
547                    let cmp_reg = self.alloc_vreg();
548                    current_block.push_inst(NativeInst::Cmp {
549                        dst: cmp_reg,
550                        cc: CondCode::Eq,
551                        ty: NativeType::I32,
552                        lhs: NativeValue::Reg(tag_reg),
553                        rhs: NativeValue::Imm(alt.ctor_tag as i64),
554                    });
555                    let then_id = self.alloc_block();
556                    let else_id = self.alloc_block();
557                    current_block.push_inst(NativeInst::CondBr {
558                        cond: NativeValue::Reg(cmp_reg),
559                        then_target: then_id,
560                        else_target: else_id,
561                    });
562                    let mut then_block = BasicBlock::new(then_id);
563                    self.bind_ctor_params(&alt.params, scrut_reg, &mut then_block);
564                    self.compile_expr(&alt.body, &mut then_block, extra_blocks, ret_type);
565                    if then_block.terminator.is_none() {
566                        then_block.push_inst(NativeInst::Br { target: merge_id });
567                    }
568                    extra_blocks.push(then_block);
569                    let mut else_block = BasicBlock::new(else_id);
570                    self.compile_expr(
571                        default
572                            .as_ref()
573                            .expect(
574                                "default is Some; guaranteed by default.is_some() check at if condition",
575                            ),
576                        &mut else_block,
577                        extra_blocks,
578                        ret_type,
579                    );
580                    if else_block.terminator.is_none() {
581                        else_block.push_inst(NativeInst::Br { target: merge_id });
582                    }
583                    extra_blocks.push(else_block);
584                } else {
585                    let tag_reg = self.alloc_vreg();
586                    current_block.push_inst(NativeInst::Call {
587                        dst: Some(tag_reg),
588                        func: NativeValue::FRef("lean_obj_tag".to_string()),
589                        args: vec![NativeValue::Reg(scrut_reg)],
590                        ret_type: NativeType::I32,
591                    });
592                    let default_id = self.alloc_block();
593                    let mut targets = Vec::new();
594                    for alt in alts {
595                        let alt_block_id = self.alloc_block();
596                        targets.push((alt.ctor_tag as u64, alt_block_id));
597                        let mut alt_block = BasicBlock::new(alt_block_id);
598                        self.bind_ctor_params(&alt.params, scrut_reg, &mut alt_block);
599                        self.compile_expr(&alt.body, &mut alt_block, extra_blocks, ret_type);
600                        if alt_block.terminator.is_none() {
601                            alt_block.push_inst(NativeInst::Br { target: merge_id });
602                        }
603                        extra_blocks.push(alt_block);
604                    }
605                    current_block.push_inst(NativeInst::Switch {
606                        value: NativeValue::Reg(tag_reg),
607                        default: default_id,
608                        targets,
609                    });
610                    let mut def_block = BasicBlock::new(default_id);
611                    if let Some(def_expr) = default {
612                        self.compile_expr(def_expr, &mut def_block, extra_blocks, ret_type);
613                    } else {
614                        def_block.push_inst(NativeInst::Ret { value: None });
615                    }
616                    if def_block.terminator.is_none() {
617                        def_block.push_inst(NativeInst::Br { target: merge_id });
618                    }
619                    extra_blocks.push(def_block);
620                }
621                extra_blocks.push(merge_block);
622            }
623            LcnfExpr::Return(arg) => {
624                let val = self.compile_arg(arg, current_block);
625                current_block.push_inst(NativeInst::Ret { value: Some(val) });
626            }
627            LcnfExpr::Unreachable => {
628                current_block.push_inst(NativeInst::Call {
629                    dst: None,
630                    func: NativeValue::FRef("lean_internal_panic_unreachable".to_string()),
631                    args: vec![],
632                    ret_type: NativeType::Void,
633                });
634                current_block.push_inst(NativeInst::Ret { value: None });
635            }
636            LcnfExpr::TailCall(func, args) => {
637                let func_val = self.compile_arg(func, current_block);
638                let arg_vals: Vec<NativeValue> = args
639                    .iter()
640                    .map(|a| self.compile_arg(a, current_block))
641                    .collect();
642                let result_reg = self.alloc_vreg();
643                current_block.push_inst(NativeInst::Call {
644                    dst: Some(result_reg),
645                    func: func_val,
646                    args: arg_vals,
647                    ret_type: *ret_type,
648                });
649                current_block.push_inst(NativeInst::Ret {
650                    value: Some(NativeValue::Reg(result_reg)),
651                });
652            }
653        }
654    }
655    /// Compile a let-value into instructions.
656    pub(super) fn compile_let_value(
657        &mut self,
658        value: &LcnfLetValue,
659        dst: Register,
660        _ty: &NativeType,
661        block: &mut BasicBlock,
662    ) {
663        match value {
664            LcnfLetValue::App(func, args) => {
665                let func_val = self.compile_arg_to_value(func);
666                let arg_vals: Vec<NativeValue> =
667                    args.iter().map(|a| self.compile_arg_to_value(a)).collect();
668                block.push_inst(NativeInst::Call {
669                    dst: Some(dst),
670                    func: func_val,
671                    args: arg_vals,
672                    ret_type: NativeType::Ptr,
673                });
674                self.stats.instructions_generated += 1;
675            }
676            LcnfLetValue::Proj(_name, idx, var) => {
677                let obj_reg = self.get_var_reg(*var);
678                block.push_inst(NativeInst::Call {
679                    dst: Some(dst),
680                    func: NativeValue::FRef("lean_ctor_get".to_string()),
681                    args: vec![NativeValue::Reg(obj_reg), NativeValue::Imm(*idx as i64)],
682                    ret_type: NativeType::Ptr,
683                });
684                self.stats.instructions_generated += 1;
685            }
686            LcnfLetValue::Ctor(_name, tag, args) => {
687                let num_objs = args.len();
688                block.push_inst(NativeInst::Call {
689                    dst: Some(dst),
690                    func: NativeValue::FRef("lean_alloc_ctor".to_string()),
691                    args: vec![
692                        NativeValue::Imm(*tag as i64),
693                        NativeValue::Imm(num_objs as i64),
694                        NativeValue::Imm(0),
695                    ],
696                    ret_type: NativeType::Ptr,
697                });
698                self.stats.instructions_generated += 1;
699                for (i, arg) in args.iter().enumerate() {
700                    let val = self.compile_arg_to_value(arg);
701                    block.push_inst(NativeInst::Call {
702                        dst: None,
703                        func: NativeValue::FRef("lean_ctor_set".to_string()),
704                        args: vec![NativeValue::Reg(dst), NativeValue::Imm(i as i64), val],
705                        ret_type: NativeType::Void,
706                    });
707                    self.stats.instructions_generated += 1;
708                }
709            }
710            LcnfLetValue::Lit(lit) => match lit {
711                LcnfLit::Nat(n) => {
712                    block.push_inst(NativeInst::LoadImm {
713                        dst,
714                        ty: NativeType::I64,
715                        value: *n as i64,
716                    });
717                    self.stats.instructions_generated += 1;
718                }
719                LcnfLit::Str(s) => {
720                    block.push_inst(NativeInst::Call {
721                        dst: Some(dst),
722                        func: NativeValue::FRef("lean_mk_string".to_string()),
723                        args: vec![NativeValue::StrRef(s.clone())],
724                        ret_type: NativeType::Ptr,
725                    });
726                    if self.config.emit_comments {
727                        block.push_inst(NativeInst::Comment(format!("string: \"{}\"", s)));
728                    }
729                    self.stats.instructions_generated += 1;
730                }
731            },
732            LcnfLetValue::Erased => {
733                block.push_inst(NativeInst::LoadImm {
734                    dst,
735                    ty: NativeType::Ptr,
736                    value: 0,
737                });
738                self.stats.instructions_generated += 1;
739            }
740            LcnfLetValue::FVar(var) => {
741                let src = self.get_var_reg(*var);
742                block.push_inst(NativeInst::Copy {
743                    dst,
744                    src: NativeValue::Reg(src),
745                });
746                self.stats.instructions_generated += 1;
747            }
748            LcnfLetValue::Reset(var) => {
749                let obj_reg = self.get_var_reg(*var);
750                block.push_inst(NativeInst::Call {
751                    dst: Some(dst),
752                    func: NativeValue::FRef("lean_obj_reset".to_string()),
753                    args: vec![NativeValue::Reg(obj_reg)],
754                    ret_type: NativeType::Ptr,
755                });
756                self.stats.instructions_generated += 1;
757            }
758            LcnfLetValue::Reuse(slot, _name, tag, args) => {
759                let num_objs = args.len();
760                let slot_reg = self.get_var_reg(*slot);
761                block.push_inst(NativeInst::Call {
762                    dst: Some(dst),
763                    func: NativeValue::FRef("lean_alloc_ctor_using".to_string()),
764                    args: vec![
765                        NativeValue::Reg(slot_reg),
766                        NativeValue::Imm(*tag as i64),
767                        NativeValue::Imm(num_objs as i64),
768                        NativeValue::Imm(0),
769                    ],
770                    ret_type: NativeType::Ptr,
771                });
772                self.stats.instructions_generated += 1;
773                for (i, arg) in args.iter().enumerate() {
774                    let val = self.compile_arg_to_value(arg);
775                    block.push_inst(NativeInst::Call {
776                        dst: None,
777                        func: NativeValue::FRef("lean_ctor_set".to_string()),
778                        args: vec![NativeValue::Reg(dst), NativeValue::Imm(i as i64), val],
779                        ret_type: NativeType::Void,
780                    });
781                    self.stats.instructions_generated += 1;
782                }
783            }
784        }
785    }
786    /// Bind constructor parameters to registers by extracting fields.
787    pub(super) fn bind_ctor_params(
788        &mut self,
789        params: &[LcnfParam],
790        scrut_reg: Register,
791        block: &mut BasicBlock,
792    ) {
793        for (i, param) in params.iter().enumerate() {
794            if param.erased {
795                continue;
796            }
797            let dst = self.alloc_vreg();
798            self.var_map.insert(param.id, dst);
799            block.push_inst(NativeInst::Call {
800                dst: Some(dst),
801                func: NativeValue::FRef("lean_ctor_get".to_string()),
802                args: vec![NativeValue::Reg(scrut_reg), NativeValue::Imm(i as i64)],
803                ret_type: NativeType::Ptr,
804            });
805            self.stats.instructions_generated += 1;
806        }
807    }
808    /// Compile an LCNF argument to a NativeValue, possibly emitting instructions.
809    pub(super) fn compile_arg(&mut self, arg: &LcnfArg, block: &mut BasicBlock) -> NativeValue {
810        match arg {
811            LcnfArg::Var(id) => {
812                let r = self.get_var_reg(*id);
813                NativeValue::Reg(r)
814            }
815            LcnfArg::Lit(lit) => match lit {
816                LcnfLit::Nat(n) => {
817                    let r = self.alloc_vreg();
818                    block.push_inst(NativeInst::LoadImm {
819                        dst: r,
820                        ty: NativeType::I64,
821                        value: *n as i64,
822                    });
823                    self.stats.instructions_generated += 1;
824                    NativeValue::Reg(r)
825                }
826                LcnfLit::Str(s) => {
827                    let r = self.alloc_vreg();
828                    block.push_inst(NativeInst::Call {
829                        dst: Some(r),
830                        func: NativeValue::FRef("lean_mk_string".to_string()),
831                        args: vec![NativeValue::StrRef(s.clone())],
832                        ret_type: NativeType::Ptr,
833                    });
834                    self.stats.instructions_generated += 1;
835                    NativeValue::Reg(r)
836                }
837            },
838            LcnfArg::Erased | LcnfArg::Type(_) => NativeValue::Imm(0),
839        }
840    }
841    /// Compile an LCNF argument to a NativeValue without emitting instructions.
842    pub(super) fn compile_arg_to_value(&mut self, arg: &LcnfArg) -> NativeValue {
843        match arg {
844            LcnfArg::Var(id) => NativeValue::Reg(self.get_var_reg(*id)),
845            LcnfArg::Lit(LcnfLit::Nat(n)) => NativeValue::Imm(*n as i64),
846            LcnfArg::Lit(LcnfLit::Str(s)) => NativeValue::StrRef(s.clone()),
847            LcnfArg::Erased | LcnfArg::Type(_) => NativeValue::Imm(0),
848        }
849    }
    /// Get accumulated statistics.
    ///
    /// Returns a shared reference to the emitter's running counters
    /// (functions, blocks, instructions, registers, spills); the caller
    /// may inspect them at any point during or after compilation.
    pub fn stats(&self) -> &NativeEmitStats {
        &self.stats
    }
854}
/// Live interval for a virtual register, as used by the linear-scan
/// register allocator below.
#[derive(Debug, Clone)]
struct LiveInterval {
    /// The virtual register this interval describes.
    pub(super) vreg: Register,
    /// First linear instruction position where the register is live.
    pub(super) start: usize,
    /// Last linear instruction position where the register is live.
    pub(super) end: usize,
    /// Physical register assigned by the allocator, if any.
    pub(super) assigned_phys: Option<Register>,
    /// Spill slot assigned when no physical register was available.
    pub(super) spill_slot: Option<u32>,
}
/// Phase of a native-backend pass, in pipeline order.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum NatPassPhase {
    Analysis,
    Transformation,
    Verification,
    Cleanup,
}
impl NatPassPhase {
    /// Lowercase human-readable name of this phase.
    #[allow(dead_code)]
    pub fn name(&self) -> &str {
        use NatPassPhase::*;
        match self {
            Analysis => "analysis",
            Transformation => "transformation",
            Verification => "verification",
            Cleanup => "cleanup",
        }
    }
    /// Whether passes in this phase may mutate the IR.
    #[allow(dead_code)]
    pub fn is_modifying(&self) -> bool {
        match self {
            NatPassPhase::Transformation | NatPassPhase::Cleanup => true,
            NatPassPhase::Analysis | NatPassPhase::Verification => false,
        }
    }
}
/// Registry of native-backend pass configurations plus per-pass run statistics.
#[allow(dead_code)]
pub struct NatPassRegistry {
    /// Registered pass configurations, in registration order.
    pub(super) configs: Vec<NatPassConfig>,
    /// Per-pass run statistics, keyed by pass name.
    pub(super) stats: std::collections::HashMap<String, NatPassStats>,
}
892impl NatPassRegistry {
893    #[allow(dead_code)]
894    pub fn new() -> Self {
895        NatPassRegistry {
896            configs: Vec::new(),
897            stats: std::collections::HashMap::new(),
898        }
899    }
900    #[allow(dead_code)]
901    pub fn register(&mut self, config: NatPassConfig) {
902        self.stats
903            .insert(config.pass_name.clone(), NatPassStats::new());
904        self.configs.push(config);
905    }
906    #[allow(dead_code)]
907    pub fn enabled_passes(&self) -> Vec<&NatPassConfig> {
908        self.configs.iter().filter(|c| c.enabled).collect()
909    }
910    #[allow(dead_code)]
911    pub fn get_stats(&self, name: &str) -> Option<&NatPassStats> {
912        self.stats.get(name)
913    }
914    #[allow(dead_code)]
915    pub fn total_passes(&self) -> usize {
916        self.configs.len()
917    }
918    #[allow(dead_code)]
919    pub fn enabled_count(&self) -> usize {
920        self.enabled_passes().len()
921    }
922    #[allow(dead_code)]
923    pub fn update_stats(&mut self, name: &str, changes: u64, time_ms: u64, iter: u32) {
924        if let Some(stats) = self.stats.get_mut(name) {
925            stats.record_run(changes, time_ms, iter);
926        }
927    }
928}
/// A value operand in the native IR.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum NativeValue {
    /// A register (virtual or physical).
    Reg(Register),
    /// A signed immediate constant.
    Imm(i64),
    /// A function reference, by symbol name.
    FRef(String),
    /// A stack slot, identified by slot index.
    StackSlot(u32),
    /// An unsigned immediate constant.
    UImm(u64),
    /// A string literal reference (pointer to the string bytes).
    StrRef(String),
}
945impl NativeValue {
946    /// Create a register value.
947    pub(super) fn reg(r: Register) -> Self {
948        NativeValue::Reg(r)
949    }
950    /// Create an immediate value.
951    pub(super) fn imm(n: i64) -> Self {
952        NativeValue::Imm(n)
953    }
954    /// Create an unsigned immediate value.
955    pub(super) fn uimm(n: u64) -> Self {
956        NativeValue::UImm(n)
957    }
958}
/// A basic block in the native IR.
#[derive(Debug, Clone)]
pub struct BasicBlock {
    /// The block identifier.
    pub label: BlockId,
    /// Block parameters (for SSA phi-elimination).
    pub params: Vec<(Register, NativeType)>,
    /// The instructions in this block (excluding the terminator).
    pub instructions: Vec<NativeInst>,
    /// The terminator instruction; `None` while the block is still being built.
    pub terminator: Option<NativeInst>,
}
971impl BasicBlock {
972    /// Create a new empty basic block.
973    pub(super) fn new(label: BlockId) -> Self {
974        BasicBlock {
975            label,
976            params: Vec::new(),
977            instructions: Vec::new(),
978            terminator: None,
979        }
980    }
981    /// Append an instruction to this block.
982    pub(super) fn push_inst(&mut self, inst: NativeInst) {
983        if inst.is_terminator() {
984            self.terminator = Some(inst);
985        } else {
986            self.instructions.push(inst);
987        }
988    }
989    /// Get the successor block IDs of this block.
990    pub fn successors(&self) -> Vec<BlockId> {
991        match &self.terminator {
992            Some(NativeInst::Br { target }) => vec![*target],
993            Some(NativeInst::CondBr {
994                then_target,
995                else_target,
996                ..
997            }) => {
998                vec![*then_target, *else_target]
999            }
1000            Some(NativeInst::Switch {
1001                default, targets, ..
1002            }) => {
1003                let mut succs = vec![*default];
1004                for (_, t) in targets {
1005                    succs.push(*t);
1006                }
1007                succs
1008            }
1009            _ => vec![],
1010        }
1011    }
1012    /// Total number of instructions (including terminator).
1013    pub fn instruction_count(&self) -> usize {
1014        self.instructions.len() + if self.terminator.is_some() { 1 } else { 0 }
1015    }
1016}
/// A single entry in the native analysis cache.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct NatCacheEntry {
    /// Cache key this entry was stored under.
    pub key: String,
    /// Cached payload bytes.
    pub data: Vec<u8>,
    /// Insertion timestamp (currently always set to 0 by `insert`).
    pub timestamp: u64,
    /// Whether the entry is still valid; cleared by `NatAnalysisCache::invalidate`.
    pub valid: bool,
}
/// A function in the native IR.
#[derive(Debug, Clone)]
pub struct NativeFunc {
    /// Function name.
    pub name: String,
    /// Parameter registers and types, in declaration order.
    pub params: Vec<(Register, NativeType)>,
    /// Return type.
    pub ret_type: NativeType,
    /// Basic blocks (entry is blocks\[0\]).
    pub blocks: Vec<BasicBlock>,
    /// Total stack size needed — presumably bytes; confirm at the
    /// frame-layout / emission site.
    pub stack_size: usize,
    /// Whether this function is recursive.
    pub is_recursive: bool,
}
1041impl NativeFunc {
1042    /// Create a new function.
1043    pub(super) fn new(
1044        name: &str,
1045        params: Vec<(Register, NativeType)>,
1046        ret_type: NativeType,
1047    ) -> Self {
1048        NativeFunc {
1049            name: name.to_string(),
1050            params,
1051            ret_type,
1052            blocks: Vec::new(),
1053            stack_size: 0,
1054            is_recursive: false,
1055        }
1056    }
1057    /// Get the entry block.
1058    pub fn entry_block(&self) -> Option<&BasicBlock> {
1059        self.blocks.first()
1060    }
1061    /// Total number of instructions across all blocks.
1062    pub fn instruction_count(&self) -> usize {
1063        self.blocks.iter().map(|b| b.instruction_count()).sum()
1064    }
1065    /// Get a block by ID.
1066    pub fn get_block(&self, id: BlockId) -> Option<&BasicBlock> {
1067        self.blocks.iter().find(|b| b.label == id)
1068    }
1069    /// Get all virtual registers used in this function.
1070    pub fn virtual_registers(&self) -> HashSet<Register> {
1071        let mut regs = HashSet::new();
1072        for (r, _) in &self.params {
1073            if r.is_virtual() {
1074                regs.insert(*r);
1075            }
1076        }
1077        for block in &self.blocks {
1078            for (r, _) in &block.params {
1079                if r.is_virtual() {
1080                    regs.insert(*r);
1081                }
1082            }
1083            for inst in &block.instructions {
1084                if let Some(dst) = inst.dst_reg() {
1085                    if dst.is_virtual() {
1086                        regs.insert(dst);
1087                    }
1088                }
1089                for src in inst.src_regs() {
1090                    if src.is_virtual() {
1091                        regs.insert(src);
1092                    }
1093                }
1094            }
1095            if let Some(term) = &block.terminator {
1096                if let Some(dst) = term.dst_reg() {
1097                    if dst.is_virtual() {
1098                        regs.insert(dst);
1099                    }
1100                }
1101                for src in term.src_regs() {
1102                    if src.is_virtual() {
1103                        regs.insert(src);
1104                    }
1105                }
1106            }
1107        }
1108        regs
1109    }
1110}
/// A single instruction in the native IR.
#[derive(Debug, Clone, PartialEq)]
pub enum NativeInst {
    /// Load immediate: `dst = imm`.
    LoadImm {
        dst: Register,
        ty: NativeType,
        value: i64,
    },
    /// Add: `dst = lhs + rhs`.
    Add {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Subtract: `dst = lhs - rhs`.
    Sub {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Multiply: `dst = lhs * rhs`.
    Mul {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Divide: `dst = lhs / rhs`.
    Div {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Bitwise AND: `dst = lhs & rhs`.
    And {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Bitwise OR: `dst = lhs | rhs`.
    Or {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Bitwise XOR: `dst = lhs ^ rhs`.
    Xor {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Shift left: `dst = lhs << rhs`.
    Shl {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Shift right (arithmetic, per this comment — confirm the sign
    /// behavior at the instruction-selection site): `dst = lhs >> rhs`.
    Shr {
        dst: Register,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Compare: `dst = cmp(cc, lhs, rhs)`.
    Cmp {
        dst: Register,
        cc: CondCode,
        ty: NativeType,
        lhs: NativeValue,
        rhs: NativeValue,
    },
    /// Unconditional branch to a block. Terminator.
    Br { target: BlockId },
    /// Conditional branch: if cond != 0, goto then_target, else goto else_target.
    /// Terminator.
    CondBr {
        cond: NativeValue,
        then_target: BlockId,
        else_target: BlockId,
    },
    /// Function call: `dst = func(args...)`; `dst` is `None` when the
    /// result is void or discarded.
    Call {
        dst: Option<Register>,
        func: NativeValue,
        args: Vec<NativeValue>,
        ret_type: NativeType,
    },
    /// Return from function, optionally with a value. Terminator.
    Ret { value: Option<NativeValue> },
    /// Load from memory: `dst = *addr`.
    Load {
        dst: Register,
        ty: NativeType,
        addr: NativeValue,
    },
    /// Store to memory: `*addr = value`.
    Store {
        ty: NativeType,
        addr: NativeValue,
        value: NativeValue,
    },
    /// Allocate stack space: `dst = alloca(size, align)`.
    Alloc {
        dst: Register,
        size: usize,
        align: usize,
    },
    /// Free heap memory.
    Free { ptr: NativeValue },
    /// Phi node (SSA): `dst = phi([(val, block), ...])`.
    Phi {
        dst: Register,
        ty: NativeType,
        incoming: Vec<(NativeValue, BlockId)>,
    },
    /// Select: `dst = cond ? true_val : false_val`.
    Select {
        dst: Register,
        ty: NativeType,
        cond: NativeValue,
        true_val: NativeValue,
        false_val: NativeValue,
    },
    /// Copy: `dst = src`.
    Copy { dst: Register, src: NativeValue },
    /// No-operation (placeholder).
    Nop,
    /// Comment (for debugging output; has no runtime effect).
    Comment(String),
    /// Integer to pointer cast.
    IntToPtr { dst: Register, src: NativeValue },
    /// Pointer to integer cast.
    PtrToInt { dst: Register, src: NativeValue },
    /// Get element pointer (pointer arithmetic over elements of
    /// `elem_size` bytes).
    GetElementPtr {
        dst: Register,
        base: NativeValue,
        offset: NativeValue,
        elem_size: usize,
    },
    /// Switch/table branch on `value` with a default target. Terminator.
    Switch {
        value: NativeValue,
        default: BlockId,
        targets: Vec<(u64, BlockId)>,
    },
}
1266impl NativeInst {
1267    /// Get the destination register of this instruction, if any.
1268    pub fn dst_reg(&self) -> Option<Register> {
1269        match self {
1270            NativeInst::LoadImm { dst, .. }
1271            | NativeInst::Add { dst, .. }
1272            | NativeInst::Sub { dst, .. }
1273            | NativeInst::Mul { dst, .. }
1274            | NativeInst::Div { dst, .. }
1275            | NativeInst::And { dst, .. }
1276            | NativeInst::Or { dst, .. }
1277            | NativeInst::Xor { dst, .. }
1278            | NativeInst::Shl { dst, .. }
1279            | NativeInst::Shr { dst, .. }
1280            | NativeInst::Cmp { dst, .. }
1281            | NativeInst::Load { dst, .. }
1282            | NativeInst::Alloc { dst, .. }
1283            | NativeInst::Phi { dst, .. }
1284            | NativeInst::Select { dst, .. }
1285            | NativeInst::Copy { dst, .. }
1286            | NativeInst::IntToPtr { dst, .. }
1287            | NativeInst::PtrToInt { dst, .. }
1288            | NativeInst::GetElementPtr { dst, .. } => Some(*dst),
1289            NativeInst::Call { dst, .. } => *dst,
1290            _ => None,
1291        }
1292    }
1293    /// Get all source registers read by this instruction.
1294    pub fn src_regs(&self) -> Vec<Register> {
1295        let mut regs = Vec::new();
1296        let extract = |v: &NativeValue, regs: &mut Vec<Register>| {
1297            if let NativeValue::Reg(r) = v {
1298                regs.push(*r);
1299            }
1300        };
1301        match self {
1302            NativeInst::Add { lhs, rhs, .. }
1303            | NativeInst::Sub { lhs, rhs, .. }
1304            | NativeInst::Mul { lhs, rhs, .. }
1305            | NativeInst::Div { lhs, rhs, .. }
1306            | NativeInst::And { lhs, rhs, .. }
1307            | NativeInst::Or { lhs, rhs, .. }
1308            | NativeInst::Xor { lhs, rhs, .. }
1309            | NativeInst::Shl { lhs, rhs, .. }
1310            | NativeInst::Shr { lhs, rhs, .. }
1311            | NativeInst::Cmp { lhs, rhs, .. } => {
1312                extract(lhs, &mut regs);
1313                extract(rhs, &mut regs);
1314            }
1315            NativeInst::CondBr { cond, .. } => extract(cond, &mut regs),
1316            NativeInst::Call { func, args, .. } => {
1317                extract(func, &mut regs);
1318                for a in args {
1319                    extract(a, &mut regs);
1320                }
1321            }
1322            NativeInst::Ret { value: Some(v) } => extract(v, &mut regs),
1323            NativeInst::Load { addr, .. } => extract(addr, &mut regs),
1324            NativeInst::Store { addr, value, .. } => {
1325                extract(addr, &mut regs);
1326                extract(value, &mut regs);
1327            }
1328            NativeInst::Free { ptr } => extract(ptr, &mut regs),
1329            NativeInst::Phi { incoming, .. } => {
1330                for (v, _) in incoming {
1331                    extract(v, &mut regs);
1332                }
1333            }
1334            NativeInst::Select {
1335                cond,
1336                true_val,
1337                false_val,
1338                ..
1339            } => {
1340                extract(cond, &mut regs);
1341                extract(true_val, &mut regs);
1342                extract(false_val, &mut regs);
1343            }
1344            NativeInst::Copy { src, .. } => extract(src, &mut regs),
1345            NativeInst::IntToPtr { src, .. } | NativeInst::PtrToInt { src, .. } => {
1346                extract(src, &mut regs)
1347            }
1348            NativeInst::GetElementPtr { base, offset, .. } => {
1349                extract(base, &mut regs);
1350                extract(offset, &mut regs);
1351            }
1352            NativeInst::Switch { value, .. } => extract(value, &mut regs),
1353            _ => {}
1354        }
1355        regs
1356    }
1357    /// Whether this instruction is a terminator (ends a basic block).
1358    pub fn is_terminator(&self) -> bool {
1359        matches!(
1360            self,
1361            NativeInst::Br { .. }
1362                | NativeInst::CondBr { .. }
1363                | NativeInst::Ret { .. }
1364                | NativeInst::Switch { .. }
1365        )
1366    }
1367}
/// Statistics from native code generation.
///
/// All counters start at zero via the derived `Default`.
#[derive(Debug, Clone, Default)]
pub struct NativeEmitStats {
    /// Number of functions compiled.
    pub functions_compiled: usize,
    /// Number of basic blocks generated.
    pub blocks_generated: usize,
    /// Number of instructions generated.
    pub instructions_generated: usize,
    /// Number of virtual registers used.
    pub virtual_regs_used: usize,
    /// Number of stack slots allocated.
    pub stack_slots_allocated: usize,
    /// Number of spills during register allocation.
    pub spills: usize,
}
/// Machine-level types for the native IR.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum NativeType {
    /// 8-bit integer.
    I8,
    /// 16-bit integer.
    I16,
    /// 32-bit integer.
    I32,
    /// 64-bit integer.
    I64,
    /// 32-bit floating point.
    F32,
    /// 64-bit floating point.
    F64,
    /// Pointer (word-sized).
    Ptr,
    /// Function reference.
    FuncRef,
    /// Void (no value).
    Void,
}
impl NativeType {
    /// Size in bytes of a value of this type (`Void` is zero-sized).
    pub fn size_bytes(&self) -> usize {
        use NativeType::*;
        match self {
            Void => 0,
            I8 => 1,
            I16 => 2,
            I32 | F32 => 4,
            I64 | F64 | Ptr | FuncRef => 8,
        }
    }
    /// Natural alignment in bytes; at least 1, even for `Void`.
    pub fn alignment(&self) -> usize {
        match self.size_bytes() {
            0 => 1,
            n => n,
        }
    }
    /// Whether this is a floating-point type.
    pub fn is_float(&self) -> bool {
        use NativeType::*;
        matches!(self, F32 | F64)
    }
    /// Whether this is an integer type.
    pub fn is_integer(&self) -> bool {
        use NativeType::*;
        matches!(self, I8 | I16 | I32 | I64)
    }
    /// Whether this is a pointer-like type (data or function pointer).
    pub fn is_pointer(&self) -> bool {
        use NativeType::*;
        matches!(self, Ptr | FuncRef)
    }
}
/// Comparison condition code.
///
/// NOTE(review): the bare orderings (`Lt`..`Ge`) are presumably signed
/// comparisons, given the separate explicit unsigned variants — confirm
/// at the instruction-lowering sites.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum CondCode {
    /// Equal.
    Eq,
    /// Not equal.
    Ne,
    /// Less than.
    Lt,
    /// Less or equal.
    Le,
    /// Greater than.
    Gt,
    /// Greater or equal.
    Ge,
    /// Unsigned less than.
    Ult,
    /// Unsigned less or equal.
    Ule,
    /// Unsigned greater than.
    Ugt,
    /// Unsigned greater or equal.
    Uge,
}
1455#[allow(dead_code)]
1456#[derive(Debug, Clone)]
1457pub struct NatPassConfig {
1458    pub phase: NatPassPhase,
1459    pub enabled: bool,
1460    pub max_iterations: u32,
1461    pub debug_output: bool,
1462    pub pass_name: String,
1463}
1464impl NatPassConfig {
1465    #[allow(dead_code)]
1466    pub fn new(name: impl Into<String>, phase: NatPassPhase) -> Self {
1467        NatPassConfig {
1468            phase,
1469            enabled: true,
1470            max_iterations: 10,
1471            debug_output: false,
1472            pass_name: name.into(),
1473        }
1474    }
1475    #[allow(dead_code)]
1476    pub fn disabled(mut self) -> Self {
1477        self.enabled = false;
1478        self
1479    }
1480    #[allow(dead_code)]
1481    pub fn with_debug(mut self) -> Self {
1482        self.debug_output = true;
1483        self
1484    }
1485    #[allow(dead_code)]
1486    pub fn max_iter(mut self, n: u32) -> Self {
1487        self.max_iterations = n;
1488        self
1489    }
1490}
/// Bounded cache for native-backend analysis results, with hit/miss counters.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct NatAnalysisCache {
    /// Stored entries, keyed by analysis key.
    pub(super) entries: std::collections::HashMap<String, NatCacheEntry>,
    /// Maximum number of entries before insertion evicts one.
    pub(super) max_size: usize,
    /// Number of successful lookups.
    pub(super) hits: u64,
    /// Number of failed lookups.
    pub(super) misses: u64,
}
1499impl NatAnalysisCache {
1500    #[allow(dead_code)]
1501    pub fn new(max_size: usize) -> Self {
1502        NatAnalysisCache {
1503            entries: std::collections::HashMap::new(),
1504            max_size,
1505            hits: 0,
1506            misses: 0,
1507        }
1508    }
1509    #[allow(dead_code)]
1510    pub fn get(&mut self, key: &str) -> Option<&NatCacheEntry> {
1511        if self.entries.contains_key(key) {
1512            self.hits += 1;
1513            self.entries.get(key)
1514        } else {
1515            self.misses += 1;
1516            None
1517        }
1518    }
1519    #[allow(dead_code)]
1520    pub fn insert(&mut self, key: String, data: Vec<u8>) {
1521        if self.entries.len() >= self.max_size {
1522            if let Some(oldest) = self.entries.keys().next().cloned() {
1523                self.entries.remove(&oldest);
1524            }
1525        }
1526        self.entries.insert(
1527            key.clone(),
1528            NatCacheEntry {
1529                key,
1530                data,
1531                timestamp: 0,
1532                valid: true,
1533            },
1534        );
1535    }
1536    #[allow(dead_code)]
1537    pub fn invalidate(&mut self, key: &str) {
1538        if let Some(entry) = self.entries.get_mut(key) {
1539            entry.valid = false;
1540        }
1541    }
1542    #[allow(dead_code)]
1543    pub fn clear(&mut self) {
1544        self.entries.clear();
1545    }
1546    #[allow(dead_code)]
1547    pub fn hit_rate(&self) -> f64 {
1548        let total = self.hits + self.misses;
1549        if total == 0 {
1550            return 0.0;
1551        }
1552        self.hits as f64 / total as f64
1553    }
1554    #[allow(dead_code)]
1555    pub fn size(&self) -> usize {
1556        self.entries.len()
1557    }
1558}
/// Linear scan register allocator.
///
/// Takes a NativeFunc with virtual registers and assigns physical registers
/// using a simple linear scan algorithm.
pub struct RegisterAllocator {
    /// Number of available physical registers.
    pub(super) num_phys_regs: usize,
    /// Live intervals computed during analysis, sorted by start point
    /// once `allocate` runs.
    pub(super) intervals: Vec<LiveInterval>,
    /// Indices into `intervals` of currently-live intervals (kept sorted
    /// by end point).
    pub(super) active: Vec<usize>,
    /// Next spill slot to allocate.
    pub(super) next_spill: u32,
    /// Per-physical-register free flag, indexed 0..num_phys_regs.
    pub(super) free_regs: Vec<bool>,
    /// Total number of spills performed.
    pub(super) spill_count: usize,
}
impl RegisterAllocator {
    /// Create a new register allocator with a pool of `num_phys_regs`
    /// physical registers.
    pub fn new(num_phys_regs: usize) -> Self {
        RegisterAllocator {
            num_phys_regs,
            intervals: Vec::new(),
            active: Vec::new(),
            next_spill: 0,
            free_regs: vec![true; num_phys_regs],
            spill_count: 0,
        }
    }
    /// Perform register allocation on a function.
    ///
    /// Classic linear scan: intervals are visited in order of increasing
    /// start point, expired intervals release their registers, and when
    /// the pool is exhausted the interval with the furthest end point is
    /// spilled. Returns a map from virtual register to assigned physical
    /// register; spilled vregs are absent from the map.
    pub fn allocate(&mut self, func: &NativeFunc) -> HashMap<Register, Register> {
        // Reset all per-run state so the allocator can be reused.
        self.intervals.clear();
        self.active.clear();
        self.free_regs = vec![true; self.num_phys_regs];
        self.spill_count = 0;
        self.compute_live_intervals(func);
        self.intervals.sort_by_key(|i| i.start);
        let mut assignment: HashMap<Register, Register> = HashMap::new();
        for idx in 0..self.intervals.len() {
            let current_start = self.intervals[idx].start;
            self.expire_old_intervals(current_start);
            if let Some(phys_idx) = self.find_free_reg() {
                self.intervals[idx].assigned_phys = Some(Register::phys(phys_idx as u32));
                self.free_regs[phys_idx] = false;
                self.active.push(idx);
                // Keep `active` sorted by end point so the spill heuristic
                // can take the furthest-ending interval from the back.
                self.active.sort_by_key(|&i| self.intervals[i].end);
                assignment.insert(self.intervals[idx].vreg, Register::phys(phys_idx as u32));
            } else {
                self.spill_at_interval(idx, &mut assignment);
            }
        }
        assignment
    }
    /// Compute live intervals for all virtual registers.
    ///
    /// Positions are a single linear index across all blocks, counting
    /// each instruction and each terminator as one slot. An interval is
    /// (first position seen, last position seen) — a conservative
    /// whole-range approximation with no hole/lifetime-split analysis.
    pub(super) fn compute_live_intervals(&mut self, func: &NativeFunc) {
        let mut intervals_map: HashMap<Register, (usize, usize)> = HashMap::new();
        let mut position = 0usize;
        for block in &func.blocks {
            for inst in &block.instructions {
                if let Some(dst) = inst.dst_reg() {
                    if dst.is_virtual() {
                        intervals_map
                            .entry(dst)
                            .and_modify(|(_s, e)| *e = position)
                            .or_insert((position, position));
                    }
                }
                for src in inst.src_regs() {
                    if src.is_virtual() {
                        intervals_map
                            .entry(src)
                            .and_modify(|(_s, e)| *e = position)
                            .or_insert((position, position));
                    }
                }
                position += 1;
            }
            // The terminator occupies its own position slot.
            if let Some(term) = &block.terminator {
                if let Some(dst) = term.dst_reg() {
                    if dst.is_virtual() {
                        intervals_map
                            .entry(dst)
                            .and_modify(|(_s, e)| *e = position)
                            .or_insert((position, position));
                    }
                }
                for src in term.src_regs() {
                    if src.is_virtual() {
                        intervals_map
                            .entry(src)
                            .and_modify(|(_s, e)| *e = position)
                            .or_insert((position, position));
                    }
                }
                position += 1;
            }
        }
        // Function parameters are live from position 0 onward.
        for (r, _) in &func.params {
            if r.is_virtual() {
                intervals_map
                    .entry(*r)
                    .and_modify(|(s, _e)| *s = 0)
                    .or_insert((0, 0));
            }
        }
        self.intervals = intervals_map
            .into_iter()
            .map(|(vreg, (start, end))| LiveInterval {
                vreg,
                start,
                end,
                assigned_phys: None,
                spill_slot: None,
            })
            .collect();
    }
    /// Expire intervals that end before the current position, returning
    /// their physical registers to the free pool.
    pub(super) fn expire_old_intervals(&mut self, current_start: usize) {
        let mut to_remove = Vec::new();
        for (active_idx, &interval_idx) in self.active.iter().enumerate() {
            if self.intervals[interval_idx].end < current_start {
                if let Some(phys) = self.intervals[interval_idx].assigned_phys {
                    // Assumes Register::phys(i) encodes as i + VIRT_PHYS_BOUNDARY,
                    // so subtracting recovers the pool index — confirm against
                    // Register's definition.
                    let phys_idx = (phys.0 - VIRT_PHYS_BOUNDARY) as usize;
                    if phys_idx < self.free_regs.len() {
                        self.free_regs[phys_idx] = true;
                    }
                }
                to_remove.push(active_idx);
            }
        }
        // Remove back-to-front so earlier indices stay valid.
        for idx in to_remove.into_iter().rev() {
            self.active.remove(idx);
        }
    }
    /// Find the lowest-indexed free physical register, if any.
    pub(super) fn find_free_reg(&self) -> Option<usize> {
        self.free_regs.iter().position(|&free| free)
    }
    /// Spill at the current interval.
    ///
    /// Standard linear-scan heuristic: if the furthest-ending active
    /// interval outlives the current one, steal its register and spill
    /// it instead; otherwise spill the current interval itself.
    pub(super) fn spill_at_interval(
        &mut self,
        current_idx: usize,
        assignment: &mut HashMap<Register, Register>,
    ) {
        if !self.active.is_empty() {
            // `active` is sorted by end point, so the last element ends furthest.
            let last_active_idx = *self
                .active
                .last()
                .expect("active is non-empty; guaranteed by !self.active.is_empty() guard");
            if self.intervals[last_active_idx].end > self.intervals[current_idx].end {
                let phys = self
                    .intervals[last_active_idx]
                    .assigned_phys
                    .expect(
                        "last active interval must have an assigned physical register; invariant of linear scan regalloc",
                    );
                // Transfer the register to the current interval...
                self.intervals[current_idx].assigned_phys = Some(phys);
                assignment.insert(self.intervals[current_idx].vreg, phys);
                // ...and demote the victim to a spill slot.
                let spill_slot = self.next_spill;
                self.next_spill += 1;
                self.intervals[last_active_idx].assigned_phys = None;
                self.intervals[last_active_idx].spill_slot = Some(spill_slot);
                assignment.remove(&self.intervals[last_active_idx].vreg);
                self.active.pop();
                self.active.push(current_idx);
                self.active.sort_by_key(|&i| self.intervals[i].end);
                self.spill_count += 1;
                return;
            }
        }
        // Current interval ends last (or nothing is active): spill it directly.
        let spill_slot = self.next_spill;
        self.next_spill += 1;
        self.intervals[current_idx].spill_slot = Some(spill_slot);
        self.spill_count += 1;
    }
    /// Get the number of spills performed during the last allocation.
    pub fn spill_count(&self) -> usize {
        self.spill_count
    }
}
/// FIFO worklist of `u32` items with O(1) duplicate suppression.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct NatWorklist {
    /// Pending items in FIFO order.
    pub(super) items: std::collections::VecDeque<u32>,
    /// Set mirror of `items`, used to reject duplicate pushes.
    pub(super) in_worklist: std::collections::HashSet<u32>,
}
1746impl NatWorklist {
1747    #[allow(dead_code)]
1748    pub fn new() -> Self {
1749        NatWorklist {
1750            items: std::collections::VecDeque::new(),
1751            in_worklist: std::collections::HashSet::new(),
1752        }
1753    }
1754    #[allow(dead_code)]
1755    pub fn push(&mut self, item: u32) -> bool {
1756        if self.in_worklist.insert(item) {
1757            self.items.push_back(item);
1758            true
1759        } else {
1760            false
1761        }
1762    }
1763    #[allow(dead_code)]
1764    pub fn pop(&mut self) -> Option<u32> {
1765        let item = self.items.pop_front()?;
1766        self.in_worklist.remove(&item);
1767        Some(item)
1768    }
1769    #[allow(dead_code)]
1770    pub fn is_empty(&self) -> bool {
1771        self.items.is_empty()
1772    }
1773    #[allow(dead_code)]
1774    pub fn len(&self) -> usize {
1775        self.items.len()
1776    }
1777    #[allow(dead_code)]
1778    pub fn contains(&self, item: u32) -> bool {
1779        self.in_worklist.contains(&item)
1780    }
1781}