// oxilean_codegen/opt_reuse/types.rs
1//! Auto-generated module
2//!
3//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
4
5use crate::lcnf::*;
6use crate::opt_licm::LicmHoistCandidate;
7use std::collections::{HashMap, HashSet};
8
9use std::collections::VecDeque;
10
/// Reuse analysis pass summary
///
/// Aggregated metrics reported after one run of the reuse-optimization pass.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ReusePassSummary {
    /// Human-readable name of the pass that produced this summary.
    pub pass_name: String,
    /// Number of functions the pass analyzed.
    pub functions_analyzed: usize,
    /// Number of reset/reuse transformations applied.
    pub reuses_applied: usize,
    /// Number of allocations placed on the stack.
    pub stack_allocations: usize,
    /// Estimated heap bytes saved by the pass.
    pub bytes_saved: u64,
    /// Wall-clock duration of the pass, in microseconds.
    pub duration_us: u64,
}
/// Per-basic-block liveness sets used by the reuse pass.
///
/// Every vector is indexed by basic-block id; each entry is the set of
/// variable ids relevant to that block.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ORLivenessInfo {
    /// Variables live on entry to each block.
    pub live_in: Vec<HashSet<u32>>,
    /// Variables live on exit from each block.
    pub live_out: Vec<HashSet<u32>>,
    /// Variables defined in each block.
    pub defs: Vec<HashSet<u32>>,
    /// Variables read in each block.
    pub uses: Vec<HashSet<u32>>,
}
impl ORLivenessInfo {
    /// Build empty liveness info for `block_count` basic blocks.
    #[allow(dead_code)]
    pub fn new(block_count: usize) -> Self {
        let fresh = || vec![HashSet::new(); block_count];
        ORLivenessInfo {
            live_in: fresh(),
            live_out: fresh(),
            defs: fresh(),
            uses: fresh(),
        }
    }
    /// Record that `var` is defined in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_def(&mut self, block: usize, var: u32) {
        if let Some(set) = self.defs.get_mut(block) {
            set.insert(var);
        }
    }
    /// Record that `var` is used in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_use(&mut self, block: usize, var: u32) {
        if let Some(set) = self.uses.get_mut(block) {
            set.insert(var);
        }
    }
    /// True when `var` is live on entry to `block` (false for unknown blocks).
    #[allow(dead_code)]
    pub fn is_live_in(&self, block: usize, var: u32) -> bool {
        self.live_in.get(block).is_some_and(|s| s.contains(&var))
    }
    /// True when `var` is live on exit from `block` (false for unknown blocks).
    #[allow(dead_code)]
    pub fn is_live_out(&self, block: usize, var: u32) -> bool {
        self.live_out.get(block).is_some_and(|s| s.contains(&var))
    }
}
/// Reuse analysis interference graph
///
/// Undirected graph over variable ids; an edge between two ids means the
/// values interfere and therefore cannot share storage.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseInterferenceGraph {
    /// Number of nodes this graph was sized for.
    pub num_nodes: usize,
    /// Edge set; each pair is stored with the smaller id first.
    pub edges: std::collections::HashSet<(u32, u32)>,
}
#[allow(dead_code)]
impl ReuseInterferenceGraph {
    /// Create an empty graph over `n` nodes.
    pub fn new(n: usize) -> Self {
        Self {
            num_nodes: n,
            edges: Default::default(),
        }
    }
    /// Canonical (min, max) representation for an undirected edge.
    fn canon(a: u32, b: u32) -> (u32, u32) {
        (a.min(b), a.max(b))
    }
    /// Insert the undirected edge `a -- b`.
    pub fn add_edge(&mut self, a: u32, b: u32) {
        self.edges.insert(Self::canon(a, b));
    }
    /// True when `a` and `b` interfere (an edge connects them).
    pub fn interfere(&self, a: u32, b: u32) -> bool {
        self.edges.contains(&Self::canon(a, b))
    }
    /// Number of edges incident to `v`.
    pub fn degree(&self, v: u32) -> usize {
        self.edges.iter().filter(|&&(a, b)| a == v || b == v).count()
    }
}
/// Allocation site descriptor
///
/// Identifies one allocation point inside a function, used as a hashable
/// key during inter-procedural reuse analysis.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AllocSite {
    /// Unique id of this allocation site.
    pub id: u32,
    /// Size class of the allocated cell.
    pub size_class: ReuseMemSizeClass,
    /// Name of the function containing the site.
    pub func: String,
    /// Whether the containing function is recursive.
    pub is_recursive: bool,
}
/// Reuse analysis region (for inter-procedural analysis)
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ReuseRegion {
    /// Unique id of the region.
    pub region_id: u32,
    /// Function this region belongs to.
    pub func_name: String,
    /// Allocation sites contained in the region.
    pub allocs: Vec<AllocSite>,
    /// Decisions paired with a u32 key (presumably the alloc-site id —
    /// confirm against the producer of this struct).
    pub decisions: Vec<(u32, ReuseDecision)>,
}
/// Information about which RC operations can be eliminated
///
/// Produced by `ReuseAnalyzer::find_rc_eliminations` and
/// `ReuseAnalyzer::find_cancel_pairs`.
#[derive(Debug, Clone, PartialEq)]
pub struct RcElimInfo {
    /// The variable whose RC operation can be eliminated
    pub var: LcnfVarId,
    /// The kind of RC operation
    pub kind: RcElimKind,
    /// Why this elimination is valid
    pub reason: RcElimReason,
}
/// Stateless constant-folding helpers used by the optimizer.
///
/// Integer operations return `None` whenever the result is undefined or
/// would overflow (division/remainder by zero, `i64::MIN / -1`, oversized
/// shifts), so callers simply skip folding instead of miscompiling.
/// Float and boolean operations are total and return plain values.
#[allow(dead_code)]
pub struct ORConstantFoldingHelper;
impl ORConstantFoldingHelper {
    /// `a + b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_add_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_add(b)
    }
    /// `a - b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_sub_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_sub(b)
    }
    /// `a * b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_mul_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_mul(b)
    }
    /// `a / b`, or `None` when `b == 0` or on overflow (`i64::MIN / -1`).
    ///
    /// `checked_div` already returns `None` for a zero divisor, so the
    /// explicit `b == 0` test of the previous version was redundant.
    #[allow(dead_code)]
    pub fn fold_div_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_div(b)
    }
    /// `a + b` with IEEE-754 semantics (total; NaN/inf propagate).
    #[allow(dead_code)]
    pub fn fold_add_f64(a: f64, b: f64) -> f64 {
        a + b
    }
    /// `a * b` with IEEE-754 semantics (total; NaN/inf propagate).
    #[allow(dead_code)]
    pub fn fold_mul_f64(a: f64, b: f64) -> f64 {
        a * b
    }
    /// `-a`, or `None` on overflow (`-i64::MIN`).
    #[allow(dead_code)]
    pub fn fold_neg_i64(a: i64) -> Option<i64> {
        a.checked_neg()
    }
    /// Logical negation.
    #[allow(dead_code)]
    pub fn fold_not_bool(a: bool) -> bool {
        !a
    }
    /// Logical AND.
    #[allow(dead_code)]
    pub fn fold_and_bool(a: bool, b: bool) -> bool {
        a && b
    }
    /// Logical OR.
    #[allow(dead_code)]
    pub fn fold_or_bool(a: bool, b: bool) -> bool {
        a || b
    }
    /// `a << b`, or `None` when `b >= 64`.
    #[allow(dead_code)]
    pub fn fold_shl_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shl(b)
    }
    /// Arithmetic `a >> b`, or `None` when `b >= 64`.
    #[allow(dead_code)]
    pub fn fold_shr_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shr(b)
    }
    /// `a % b`, or `None` when `b == 0` or on overflow.
    ///
    /// Bug fix: the previous `Some(a % b)` panicked on `i64::MIN % -1`
    /// ("attempt to calculate the remainder with overflow" — Rust checks
    /// div/rem overflow in every build profile). `checked_rem` returns
    /// `None` for that case as well as for `b == 0`.
    #[allow(dead_code)]
    pub fn fold_rem_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_rem(b)
    }
    /// Bitwise AND (total).
    #[allow(dead_code)]
    pub fn fold_bitand_i64(a: i64, b: i64) -> i64 {
        a & b
    }
    /// Bitwise OR (total).
    #[allow(dead_code)]
    pub fn fold_bitor_i64(a: i64, b: i64) -> i64 {
        a | b
    }
    /// Bitwise XOR (total).
    #[allow(dead_code)]
    pub fn fold_bitxor_i64(a: i64, b: i64) -> i64 {
        a ^ b
    }
    /// Bitwise NOT (total).
    #[allow(dead_code)]
    pub fn fold_bitnot_i64(a: i64) -> i64 {
        !a
    }
}
205/// Reuse analysis allocation log
206#[allow(dead_code)]
207#[derive(Debug, Default)]
208pub struct ReuseAllocLog {
209    pub records: Vec<ReuseAllocRecord>,
210}
211#[allow(dead_code)]
212impl ReuseAllocLog {
213    pub fn new() -> Self {
214        Self::default()
215    }
216    pub fn record(&mut self, r: ReuseAllocRecord) {
217        self.records.push(r);
218    }
219    pub fn heap_count(&self) -> usize {
220        self.records
221            .iter()
222            .filter(|r| r.kind == ReuseAllocKind::Heap)
223            .count()
224    }
225    pub fn stack_count(&self) -> usize {
226        self.records
227            .iter()
228            .filter(|r| r.kind == ReuseAllocKind::Stack)
229            .count()
230    }
231    pub fn reuse_count(&self) -> usize {
232        self.records
233            .iter()
234            .filter(|r| r.reused_from.is_some())
235            .count()
236    }
237    pub fn total_bytes(&self) -> u64 {
238        self.records.iter().map(|r| r.size).sum()
239    }
240}
/// Reuse analysis diagnostic
///
/// Severity level attached to a `ReuseDiag` message.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ReuseDiagLevel {
    /// Informational note; no action required.
    Info,
    /// Possible problem worth reviewing.
    Warning,
    /// Definite problem detected by the analysis.
    Error,
}
249/// Ownership status of a variable
250#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
251pub enum Ownership {
252    /// Unique owner (refcount = 1)
253    Unique,
254    /// Shared (refcount > 1 or unknown)
255    Shared,
256    /// Borrowed (temporary reference)
257    Borrowed,
258    /// Unknown ownership
259    Unknown,
260}
261impl Ownership {
262    pub(super) fn merge(self, other: Ownership) -> Ownership {
263        match (self, other) {
264            (Ownership::Unique, Ownership::Unique) => Ownership::Unique,
265            (Ownership::Borrowed, Ownership::Borrowed) => Ownership::Borrowed,
266            (Ownership::Shared, _) | (_, Ownership::Shared) => Ownership::Shared,
267            (Ownership::Unknown, _) | (_, Ownership::Unknown) => Ownership::Unknown,
268            _ => Ownership::Shared,
269        }
270    }
271}
/// Simple lifetime analysis for borrow correctness
///
/// Ensures that borrowed values outlive their borrows.
pub struct LifetimeAnalyzer {
    /// Stack of active lifetimes
    ///
    /// The innermost scope is the last element; maintained by
    /// `push_scope`/`pop_scope`.
    pub(super) active: Vec<LifetimeScope>,
}
279impl LifetimeAnalyzer {
280    pub(super) fn new() -> Self {
281        LifetimeAnalyzer { active: Vec::new() }
282    }
283    pub(super) fn push_scope(&mut self) {
284        self.active.push(LifetimeScope {
285            defined: HashSet::new(),
286            borrowed: HashSet::new(),
287        });
288    }
289    pub(super) fn pop_scope(&mut self) -> Option<LifetimeScope> {
290        self.active.pop()
291    }
292    pub(super) fn define_var(&mut self, var: LcnfVarId) {
293        if let Some(scope) = self.active.last_mut() {
294            scope.defined.insert(var);
295        }
296    }
297    pub(super) fn borrow_var(&mut self, var: LcnfVarId) {
298        if let Some(scope) = self.active.last_mut() {
299            scope.borrowed.insert(var);
300        }
301    }
302    /// Check if a borrow is safe (the borrowed value outlives the borrow)
303    pub(super) fn is_borrow_safe(&self, borrowed_var: LcnfVarId) -> bool {
304        let mut found_definition = false;
305        for scope in &self.active {
306            if scope.defined.contains(&borrowed_var) {
307                found_definition = true;
308            }
309        }
310        found_definition
311    }
312    /// Analyze lifetimes in an expression
313    pub(super) fn analyze(&mut self, expr: &LcnfExpr) {
314        match expr {
315            LcnfExpr::Let { id, body, .. } => {
316                self.define_var(*id);
317                self.analyze(body);
318            }
319            LcnfExpr::Case { alts, default, .. } => {
320                for alt in alts {
321                    self.push_scope();
322                    for field in &alt.params {
323                        self.define_var(field.id);
324                    }
325                    self.analyze(&alt.body);
326                    self.pop_scope();
327                }
328                if let Some(def) = default {
329                    self.push_scope();
330                    self.analyze(def);
331                    self.pop_scope();
332                }
333            }
334            _ => {}
335        }
336    }
337}
/// Phase of an optimize-reuse pass pipeline.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum ORPassPhase {
    Analysis,
    Transformation,
    Verification,
    Cleanup,
}
impl ORPassPhase {
    /// Lowercase display name of the phase.
    #[allow(dead_code)]
    pub fn name(&self) -> &str {
        match self {
            Self::Analysis => "analysis",
            Self::Transformation => "transformation",
            Self::Verification => "verification",
            Self::Cleanup => "cleanup",
        }
    }
    /// Whether the phase may mutate the program.
    ///
    /// Only `Transformation` and `Cleanup` modify; spelled here as the
    /// negation of the read-only phases.
    #[allow(dead_code)]
    pub fn is_modifying(&self) -> bool {
        !matches!(self, Self::Analysis | Self::Verification)
    }
}
/// A single diagnostic emitted by the reuse analysis.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ReuseDiag {
    /// Severity of the diagnostic.
    pub level: ReuseDiagLevel,
    /// Human-readable description.
    pub message: String,
    /// Variable id the diagnostic refers to, if any.
    pub var: Option<u32>,
}
/// Reuse allocation kind
///
/// Where an allocation was ultimately placed by the optimizer.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ReuseAllocKind {
    /// Ordinary heap allocation.
    Heap,
    /// Promoted to the stack.
    Stack,
    /// Placed in a scratch buffer.
    Scratch,
    /// Static storage.
    Static,
    /// Inlined into the containing value.
    Inline,
}
378/// Free pool tracker (for reuse optimization)
379#[allow(dead_code)]
380#[derive(Debug, Default)]
381pub struct ReuseFreePool {
382    pub pool: std::collections::HashMap<ReuseMemSizeClass, Vec<u32>>,
383}
384#[allow(dead_code)]
385impl ReuseFreePool {
386    pub fn new() -> Self {
387        Self::default()
388    }
389    pub fn push(&mut self, var: u32, class: ReuseMemSizeClass) {
390        self.pool.entry(class).or_default().push(var);
391    }
392    pub fn pop(&mut self, class: &ReuseMemSizeClass) -> Option<u32> {
393        self.pool.get_mut(class)?.pop()
394    }
395    pub fn total_free(&self) -> usize {
396        self.pool.values().map(|v| v.len()).sum()
397    }
398}
/// Reuse analysis statistics (extended)
///
/// Extended counter set; all fields start at zero via `Default`.
#[allow(dead_code)]
#[derive(Debug, Default, Clone)]
pub struct ReuseStatsExt {
    /// Allocations inspected by the analysis.
    pub allocs_analyzed: usize,
    /// Reset/reuse rewrites performed.
    pub reuses_applied: usize,
    /// Allocations promoted to the stack.
    pub stack_allocations: usize,
    /// Reference-count bump operations counted.
    pub rc_bumps: usize,
    /// Allocations inlined.
    pub inlines: usize,
    /// Scratch-buffer placements.
    pub scratch_uses: usize,
    /// Estimated bytes of heap traffic saved.
    pub bytes_saved: u64,
    /// Allocations removed entirely.
    pub allocs_eliminated: usize,
}
/// A potential reset-reuse opportunity
///
/// Pairs a deallocation (a uniquely-owned case scrutinee) with a
/// constructor allocation found in one of the case arms; produced by
/// `ReuseAnalyzer::find_reuse_opportunities`.
#[derive(Debug, Clone, PartialEq)]
pub struct ReuseOpportunity {
    /// The variable being deallocated (potential reset source)
    pub dealloc_var: LcnfVarId,
    /// The allocation site (potential reuse target)
    pub alloc_var: LcnfVarId,
    /// Constructor name for the deallocation
    pub dealloc_ctor: String,
    /// Constructor tag for the deallocation
    pub dealloc_tag: u32,
    /// Constructor name for the allocation
    pub alloc_ctor: String,
    /// Constructor tag for the allocation
    pub alloc_tag: u32,
    /// Whether the layouts are compatible
    pub layout_compatible: bool,
    /// Estimated savings from this reuse (in allocation cost units);
    /// computed as the allocation's total words × 8 bytes.
    pub estimated_savings: usize,
}
/// Main reuse analysis and optimization struct
///
/// Owns all per-run analysis state; populated by `analyze_decl` and
/// exposed through the accessor methods.
pub struct ReuseAnalyzer {
    /// Tuning flags and limits for the analysis.
    pub(super) config: ReuseConfig,
    /// Borrow information for each variable
    pub(super) borrow_info: HashMap<LcnfVarId, BorrowInfo>,
    /// Detected reuse opportunities
    pub(super) reuse_opportunities: Vec<ReuseOpportunity>,
    /// RC operations that can be eliminated
    pub(super) rc_eliminations: Vec<RcElimInfo>,
    /// Statistics
    pub(super) stats: ReuseStats,
    /// Layout information cache
    ///
    /// NOTE(review): not written by any method visible in this file —
    /// confirm where (or whether) it is populated.
    pub(super) layout_cache: HashMap<String, LayoutInfo>,
    /// Use count for each variable
    ///
    /// Rebuilt per declaration by `compute_use_counts`.
    pub(super) use_counts: HashMap<LcnfVarId, usize>,
}
448impl ReuseAnalyzer {
449    /// Create a new reuse analyzer
450    pub fn new(config: ReuseConfig) -> Self {
451        ReuseAnalyzer {
452            config,
453            borrow_info: HashMap::new(),
454            reuse_opportunities: Vec::new(),
455            rc_eliminations: Vec::new(),
456            stats: ReuseStats::default(),
457            layout_cache: HashMap::new(),
458            use_counts: HashMap::new(),
459        }
460    }
    /// Get the optimization statistics
    ///
    /// Counters accumulated across all declarations analyzed so far.
    pub fn stats(&self) -> &ReuseStats {
        &self.stats
    }
    /// Get detected reuse opportunities
    ///
    /// Populated by `find_reuse_opportunities` during `analyze_decl`.
    pub fn reuse_opportunities(&self) -> &[ReuseOpportunity] {
        &self.reuse_opportunities
    }
    /// Get RC eliminations
    ///
    /// Populated by `find_rc_eliminations` during `analyze_decl`.
    pub fn rc_eliminations(&self) -> &[RcElimInfo] {
        &self.rc_eliminations
    }
    /// Get borrow info for a variable
    ///
    /// Returns `None` when the variable has not been analyzed.
    pub fn get_borrow_info(&self, var: &LcnfVarId) -> Option<&BorrowInfo> {
        self.borrow_info.get(var)
    }
477    /// Analyze a single declaration
478    pub(super) fn analyze_decl(&mut self, decl: &LcnfFunDecl) {
479        self.use_counts.clear();
480        self.compute_use_counts(&decl.body);
481        for param in &decl.params {
482            self.stats.vars_analyzed += 1;
483            let ownership = if self.use_counts.get(&param.id).copied().unwrap_or(0) <= 1 {
484                Ownership::Unique
485            } else {
486                Ownership::Shared
487            };
488            let info = BorrowInfo::with_ownership(param.id, ownership);
489            self.borrow_info.insert(param.id, info);
490        }
491        self.analyze_ownership(&decl.body, 0);
492        if self.config.enable_borrow {
493            self.infer_borrows(decl);
494        }
495        if self.config.enable_reset_reuse {
496            self.find_reuse_opportunities(&decl.body);
497        }
498        if self.config.enable_rc_elim {
499            self.find_rc_eliminations(&decl.body);
500        }
501    }
502    /// Compute use counts for all variables in an expression
503    pub(super) fn compute_use_counts(&mut self, expr: &LcnfExpr) {
504        match expr {
505            LcnfExpr::Let { value, body, .. } => {
506                self.count_value_uses(value);
507                self.compute_use_counts(body);
508            }
509            LcnfExpr::Case {
510                scrutinee,
511                alts,
512                default,
513                ..
514            } => {
515                *self.use_counts.entry(*scrutinee).or_insert(0) += 1;
516                for alt in alts {
517                    self.compute_use_counts(&alt.body);
518                }
519                if let Some(def) = default {
520                    self.compute_use_counts(def);
521                }
522            }
523            LcnfExpr::Return(arg) => {
524                if let LcnfArg::Var(v) = arg {
525                    *self.use_counts.entry(*v).or_insert(0) += 1;
526                }
527            }
528            LcnfExpr::TailCall(func, args) => {
529                if let LcnfArg::Var(v) = func {
530                    *self.use_counts.entry(*v).or_insert(0) += 1;
531                }
532                for a in args {
533                    if let LcnfArg::Var(v) = a {
534                        *self.use_counts.entry(*v).or_insert(0) += 1;
535                    }
536                }
537            }
538            LcnfExpr::Unreachable => {}
539        }
540    }
541    /// Count uses in a let-value
542    pub(super) fn count_value_uses(&mut self, value: &LcnfLetValue) {
543        match value {
544            LcnfLetValue::App(func, args) => {
545                if let LcnfArg::Var(v) = func {
546                    *self.use_counts.entry(*v).or_insert(0) += 1;
547                }
548                for a in args {
549                    if let LcnfArg::Var(v) = a {
550                        *self.use_counts.entry(*v).or_insert(0) += 1;
551                    }
552                }
553            }
554            LcnfLetValue::Proj(_, _, v) => {
555                *self.use_counts.entry(*v).or_insert(0) += 1;
556            }
557            LcnfLetValue::Ctor(_, _, args) => {
558                for a in args {
559                    if let LcnfArg::Var(v) = a {
560                        *self.use_counts.entry(*v).or_insert(0) += 1;
561                    }
562                }
563            }
564            LcnfLetValue::FVar(v) => {
565                *self.use_counts.entry(*v).or_insert(0) += 1;
566            }
567            LcnfLetValue::Lit(_) | LcnfLetValue::Erased => {}
568            LcnfLetValue::Reset(v) => {
569                *self.use_counts.entry(*v).or_insert(0) += 1;
570            }
571            LcnfLetValue::Reuse(slot, _, _, args) => {
572                *self.use_counts.entry(*slot).or_insert(0) += 1;
573                for a in args {
574                    if let LcnfArg::Var(v) = a {
575                        *self.use_counts.entry(*v).or_insert(0) += 1;
576                    }
577                }
578            }
579        }
580    }
581    /// Analyze ownership status through an expression
582    pub(super) fn analyze_ownership(&mut self, expr: &LcnfExpr, depth: usize) {
583        if depth > self.config.analysis_depth {
584            return;
585        }
586        match expr {
587            LcnfExpr::Let {
588                id, value, body, ..
589            } => {
590                self.stats.vars_analyzed += 1;
591                let ownership = self.infer_value_ownership(value);
592                let mut info = BorrowInfo::with_ownership(*id, ownership);
593                match value {
594                    LcnfLetValue::FVar(src) | LcnfLetValue::Proj(_, _, src) => {
595                        info.derived_from.push(*src);
596                    }
597                    _ => {}
598                }
599                info.escapes = self.does_escape(*id, body);
600                if ownership == Ownership::Unique {
601                    self.stats.unique_ownership += 1;
602                }
603                self.borrow_info.insert(*id, info);
604                self.analyze_ownership(body, depth + 1);
605            }
606            LcnfExpr::Case { alts, default, .. } => {
607                for alt in alts {
608                    for field in &alt.params {
609                        self.stats.vars_analyzed += 1;
610                        let info = BorrowInfo::new(field.id);
611                        self.borrow_info.insert(field.id, info);
612                    }
613                    self.analyze_ownership(&alt.body, depth + 1);
614                }
615                if let Some(def) = default {
616                    self.analyze_ownership(def, depth + 1);
617                }
618            }
619            _ => {}
620        }
621    }
622    /// Infer ownership status from a let-value
623    pub(super) fn infer_value_ownership(&self, value: &LcnfLetValue) -> Ownership {
624        match value {
625            LcnfLetValue::Ctor(_, _, _) => Ownership::Unique,
626            LcnfLetValue::Lit(_) => Ownership::Unique,
627            LcnfLetValue::Erased => Ownership::Unique,
628            LcnfLetValue::FVar(src) => self
629                .borrow_info
630                .get(src)
631                .map(|info| info.ownership)
632                .unwrap_or(Ownership::Unknown),
633            LcnfLetValue::Proj(_, _, src) => {
634                let parent_ownership = self
635                    .borrow_info
636                    .get(src)
637                    .map(|info| info.ownership)
638                    .unwrap_or(Ownership::Unknown);
639                match parent_ownership {
640                    Ownership::Unique => Ownership::Borrowed,
641                    _ => Ownership::Shared,
642                }
643            }
644            LcnfLetValue::App(_, _) => Ownership::Unknown,
645            LcnfLetValue::Reset(_) => Ownership::Unique,
646            LcnfLetValue::Reuse(_, _, _, _) => Ownership::Unique,
647        }
648    }
649    /// Check if a variable escapes the current scope
650    pub(super) fn does_escape(&self, var: LcnfVarId, expr: &LcnfExpr) -> bool {
651        match expr {
652            LcnfExpr::Return(LcnfArg::Var(v)) => *v == var,
653            LcnfExpr::TailCall(_, args) => args
654                .iter()
655                .any(|a| matches!(a, LcnfArg::Var(v) if * v == var)),
656            LcnfExpr::Let { value, body, .. } => {
657                let in_value = match value {
658                    LcnfLetValue::Ctor(_, _, args) => args
659                        .iter()
660                        .any(|a| matches!(a, LcnfArg::Var(v) if * v == var)),
661                    LcnfLetValue::App(_, args) => args
662                        .iter()
663                        .any(|a| matches!(a, LcnfArg::Var(v) if * v == var)),
664                    _ => false,
665                };
666                in_value || self.does_escape(var, body)
667            }
668            LcnfExpr::Case { alts, default, .. } => {
669                alts.iter().any(|a| self.does_escape(var, &a.body))
670                    || default.as_ref().is_some_and(|d| self.does_escape(var, d))
671            }
672            _ => false,
673        }
674    }
675    /// Infer borrow annotations for function parameters
676    pub(super) fn infer_borrows(&mut self, decl: &LcnfFunDecl) {
677        for param in &decl.params {
678            let use_count = self.use_counts.get(&param.id).copied().unwrap_or(0);
679            let escapes = self.does_escape(param.id, &decl.body);
680            if !escapes && use_count <= 2 {
681                if let Some(info) = self.borrow_info.get_mut(&param.id) {
682                    info.can_borrow = true;
683                    info.ownership = Ownership::Borrowed;
684                    self.stats.borrows_inferred += 1;
685                }
686            }
687        }
688    }
689    /// Find reset-reuse opportunities
690    pub(super) fn find_reuse_opportunities(&mut self, expr: &LcnfExpr) {
691        match expr {
692            LcnfExpr::Case {
693                scrutinee, alts, ..
694            } => {
695                let scrutinee_unique = self
696                    .borrow_info
697                    .get(scrutinee)
698                    .is_some_and(|info| info.ownership == Ownership::Unique);
699                if scrutinee_unique {
700                    for alt in alts {
701                        self.find_ctor_in_body(
702                            &alt.body,
703                            *scrutinee,
704                            &alt.ctor_name,
705                            alt.ctor_tag,
706                            alt.params.len(),
707                        );
708                    }
709                }
710                for alt in alts {
711                    self.find_reuse_opportunities(&alt.body);
712                }
713            }
714            LcnfExpr::Let { body, .. } => {
715                self.find_reuse_opportunities(body);
716            }
717            _ => {}
718        }
719    }
720    /// Find constructor allocations in a case body that could reuse the scrutinee
721    pub(super) fn find_ctor_in_body(
722        &mut self,
723        expr: &LcnfExpr,
724        scrutinee: LcnfVarId,
725        dealloc_ctor: &str,
726        dealloc_tag: u32,
727        dealloc_fields: usize,
728    ) {
729        match expr {
730            LcnfExpr::Let {
731                id,
732                value: LcnfLetValue::Ctor(alloc_ctor, alloc_tag, args),
733                body,
734                ..
735            } => {
736                let dealloc_layout = LayoutInfo::new(dealloc_ctor, dealloc_tag, dealloc_fields, 0);
737                let alloc_layout = LayoutInfo::new(alloc_ctor, *alloc_tag, args.len(), 0);
738                if alloc_layout.fits_in(&dealloc_layout) {
739                    self.reuse_opportunities.push(ReuseOpportunity {
740                        dealloc_var: scrutinee,
741                        alloc_var: *id,
742                        dealloc_ctor: dealloc_ctor.to_string(),
743                        dealloc_tag,
744                        alloc_ctor: alloc_ctor.clone(),
745                        alloc_tag: *alloc_tag,
746                        layout_compatible: dealloc_layout.is_compatible_with(&alloc_layout),
747                        estimated_savings: alloc_layout.total_words * 8,
748                    });
749                    self.stats.reuse_pairs += 1;
750                }
751                self.find_ctor_in_body(body, scrutinee, dealloc_ctor, dealloc_tag, dealloc_fields);
752            }
753            LcnfExpr::Let { body, .. } => {
754                self.find_ctor_in_body(body, scrutinee, dealloc_ctor, dealloc_tag, dealloc_fields);
755            }
756            _ => {}
757        }
758    }
759    /// Find RC operations that can be eliminated
760    pub(super) fn find_rc_eliminations(&mut self, expr: &LcnfExpr) {
761        match expr {
762            LcnfExpr::Let {
763                id, value, body, ..
764            } => {
765                if let Some(info) = self.borrow_info.get(id) {
766                    if info.ownership == Ownership::Unique {
767                        let use_count = self.use_counts.get(id).copied().unwrap_or(0);
768                        if use_count <= 1 {
769                            self.rc_eliminations.push(RcElimInfo {
770                                var: *id,
771                                kind: RcElimKind::SkipDec,
772                                reason: RcElimReason::UniqueOwnership,
773                            });
774                            self.stats.rc_ops_eliminated += 1;
775                        }
776                    } else if info.can_borrow {
777                        self.rc_eliminations.push(RcElimInfo {
778                            var: *id,
779                            kind: RcElimKind::SkipInc,
780                            reason: RcElimReason::Borrowed,
781                        });
782                        self.stats.rc_ops_eliminated += 1;
783                    }
784                }
785                self.find_cancel_pairs(value, *id);
786                self.find_rc_eliminations(body);
787            }
788            LcnfExpr::Case { alts, default, .. } => {
789                for alt in alts {
790                    self.find_rc_eliminations(&alt.body);
791                }
792                if let Some(def) = default {
793                    self.find_rc_eliminations(def);
794                }
795            }
796            _ => {}
797        }
798    }
799    /// Find RC inc/dec pairs that cancel each other
800    pub(super) fn find_cancel_pairs(&mut self, value: &LcnfLetValue, id: LcnfVarId) {
801        if let LcnfLetValue::App(_, args) = value {
802            for arg in args {
803                if let LcnfArg::Var(x) = arg {
804                    let use_count = self.use_counts.get(x).copied().unwrap_or(0);
805                    if use_count <= 1 {
806                        if !self
807                            .rc_eliminations
808                            .iter()
809                            .any(|e| e.var == *x && e.kind == RcElimKind::CancelPair)
810                        {
811                            self.rc_eliminations.push(RcElimInfo {
812                                var: *x,
813                                kind: RcElimKind::CancelPair,
814                                reason: RcElimReason::CancelledPair,
815                            });
816                            self.stats.rc_ops_eliminated += 1;
817                        }
818                    }
819                }
820            }
821        }
822        let _ = id;
823    }
824    /// Apply reset-reuse transformations to an expression
825    pub(super) fn apply_reuse(&self, expr: &mut LcnfExpr) {
826        let reuse_map: HashMap<LcnfVarId, &ReuseOpportunity> = self
827            .reuse_opportunities
828            .iter()
829            .map(|opp| (opp.alloc_var, opp))
830            .collect();
831        self.apply_reuse_inner(expr, &reuse_map);
832    }
    /// Recursively apply reuse transformations.
    ///
    /// When `id` is in `reuse_map` and the bound value is a `Ctor`, we replace:
    /// ```text
    ///   let id = Ctor(args...)
    /// ```
    /// with:
    /// ```text
    ///   let slot = reset(dealloc_var)
    ///   let id   = reuse(slot, Ctor, args...)
    /// ```
    pub(super) fn apply_reuse_inner(
        &self,
        expr: &mut LcnfExpr,
        reuse_map: &HashMap<LcnfVarId, &ReuseOpportunity>,
    ) {
        // Decide first, with only an immutable borrow, whether this binding
        // is a layout-compatible Ctor we can rewrite.
        let can_reuse = if let LcnfExpr::Let { id, value, .. } = &*expr {
            matches!(value, LcnfLetValue::Ctor(..))
                && reuse_map.get(id).is_some_and(|o| o.layout_compatible)
        } else {
            false
        };
        // Extract the data needed for the rewrite; dummy ids when not reusing.
        let (dealloc_var, cur_id) = if can_reuse {
            if let LcnfExpr::Let { id, .. } = &*expr {
                let opp = reuse_map[id];
                (opp.dealloc_var, *id)
            } else {
                unreachable!()
            }
        } else {
            (LcnfVarId(0), LcnfVarId(0))
        };
        if can_reuse {
            // Fresh id for the reset slot. NOTE(review): the +1_000_000
            // offset assumes original variable ids stay below that bound;
            // otherwise collisions are possible — confirm with the id
            // allocator.
            let slot_id = LcnfVarId(cur_id.0 + 1_000_000);
            // Take ownership of the node so it can be destructured and rebuilt.
            let old_expr = std::mem::replace(expr, LcnfExpr::Unreachable);
            if let LcnfExpr::Let {
                id: old_id,
                name,
                ty,
                value: LcnfLetValue::Ctor(ctor_name, ctor_tag, args),
                body,
            } = old_expr
            {
                // `let id = reuse(slot, Ctor, args...)` keeps the original binding.
                let reuse_let = LcnfExpr::Let {
                    id: old_id,
                    name: name.clone(),
                    ty,
                    value: LcnfLetValue::Reuse(slot_id, ctor_name, ctor_tag, args),
                    body,
                };
                // `let slot = reset(dealloc_var)` wraps the reuse binding.
                let reset_let = LcnfExpr::Let {
                    id: slot_id,
                    name: format!("{}_slot", name),
                    ty: LcnfType::Object,
                    value: LcnfLetValue::Reset(dealloc_var),
                    body: Box::new(reuse_let),
                };
                *expr = reset_let;
                // Continue the rewrite below the freshly inserted pair.
                if let LcnfExpr::Let {
                    body: slot_body, ..
                } = expr
                {
                    if let LcnfExpr::Let {
                        body: reuse_body, ..
                    } = slot_body.as_mut()
                    {
                        self.apply_reuse_inner(reuse_body, reuse_map);
                    }
                }
                return;
            }
        }
        // No rewrite at this node: recurse structurally.
        match expr {
            LcnfExpr::Let { body, .. } => {
                self.apply_reuse_inner(body, reuse_map);
            }
            LcnfExpr::Case { alts, default, .. } => {
                for alt in alts.iter_mut() {
                    self.apply_reuse_inner(&mut alt.body, reuse_map);
                }
                if let Some(def) = default {
                    self.apply_reuse_inner(def, reuse_map);
                }
            }
            _ => {}
        }
    }
920    /// Apply borrow annotations to a declaration
921    pub(super) fn apply_borrows(&self, decl: &mut LcnfFunDecl) {
922        for param in &mut decl.params {
923            if let Some(info) = self.borrow_info.get(&param.id) {
924                if info.can_borrow && !param.erased {
925                    param.borrowed = true;
926                }
927            }
928        }
929    }
930}
/// Statistics for reuse optimization.
///
/// Aggregate counters for one run of the reuse pass; starts zeroed via
/// `Default`.
#[derive(Debug, Clone, Default)]
pub struct ReuseStats {
    /// Number of reset-reuse pairs found
    pub reuse_pairs: usize,
    /// Number of borrows inferred
    pub borrows_inferred: usize,
    /// Number of RC operations eliminated
    pub rc_ops_eliminated: usize,
    /// Number of in-place updates enabled
    pub in_place_updates: usize,
    /// Number of unique ownership inferences
    pub unique_ownership: usize,
    /// Number of variables analyzed
    pub vars_analyzed: usize,
}
/// Reuse analysis interprocedural summary
///
/// Global roll-up of per-region reuse decisions (see `add_region`).
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseIPASummary {
    /// Regions recorded so far, in insertion order.
    pub regions: Vec<ReuseRegion>,
    /// Total `ReuseDecision::Reuse` decisions across all regions.
    pub global_reuse_count: usize,
    /// Total `ReuseDecision::StackAlloc` decisions across all regions.
    pub global_stack_count: usize,
    /// Total bytes saved.
    /// NOTE(review): never updated by `add_region`; confirm who maintains it.
    pub total_bytes_saved: u64,
}
956#[allow(dead_code)]
957impl ReuseIPASummary {
958    pub fn new() -> Self {
959        Self::default()
960    }
961    pub fn add_region(&mut self, r: ReuseRegion) {
962        self.global_reuse_count += r
963            .decisions
964            .iter()
965            .filter(|(_, d)| matches!(d, ReuseDecision::Reuse(_)))
966            .count();
967        self.global_stack_count += r
968            .decisions
969            .iter()
970            .filter(|(_, d)| *d == ReuseDecision::StackAlloc)
971            .count();
972        self.regions.push(r);
973    }
974    pub fn region_count(&self) -> usize {
975        self.regions.len()
976    }
977}
/// Dependency graph over `u32` node ids, stored as a node list plus an
/// edge list of (dependency, dependent) pairs.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ORDepGraph {
    /// Node ids in insertion order, no duplicates (see `add_node`).
    pub(super) nodes: Vec<u32>,
    /// Edges as (dep, dependent): `dependent` depends on `dep`.
    pub(super) edges: Vec<(u32, u32)>,
}
984impl ORDepGraph {
985    #[allow(dead_code)]
986    pub fn new() -> Self {
987        ORDepGraph {
988            nodes: Vec::new(),
989            edges: Vec::new(),
990        }
991    }
992    #[allow(dead_code)]
993    pub fn add_node(&mut self, id: u32) {
994        if !self.nodes.contains(&id) {
995            self.nodes.push(id);
996        }
997    }
998    #[allow(dead_code)]
999    pub fn add_dep(&mut self, dep: u32, dependent: u32) {
1000        self.add_node(dep);
1001        self.add_node(dependent);
1002        self.edges.push((dep, dependent));
1003    }
1004    #[allow(dead_code)]
1005    pub fn dependents_of(&self, node: u32) -> Vec<u32> {
1006        self.edges
1007            .iter()
1008            .filter(|(d, _)| *d == node)
1009            .map(|(_, dep)| *dep)
1010            .collect()
1011    }
1012    #[allow(dead_code)]
1013    pub fn dependencies_of(&self, node: u32) -> Vec<u32> {
1014        self.edges
1015            .iter()
1016            .filter(|(_, dep)| *dep == node)
1017            .map(|(d, _)| *d)
1018            .collect()
1019    }
1020    #[allow(dead_code)]
1021    pub fn topological_sort(&self) -> Vec<u32> {
1022        let mut in_degree: std::collections::HashMap<u32, u32> = std::collections::HashMap::new();
1023        for &n in &self.nodes {
1024            in_degree.insert(n, 0);
1025        }
1026        for (_, dep) in &self.edges {
1027            *in_degree.entry(*dep).or_insert(0) += 1;
1028        }
1029        let mut queue: std::collections::VecDeque<u32> = self
1030            .nodes
1031            .iter()
1032            .filter(|&&n| in_degree[&n] == 0)
1033            .copied()
1034            .collect();
1035        let mut result = Vec::new();
1036        while let Some(node) = queue.pop_front() {
1037            result.push(node);
1038            for dep in self.dependents_of(node) {
1039                let cnt = in_degree.entry(dep).or_insert(0);
1040                *cnt = cnt.saturating_sub(1);
1041                if *cnt == 0 {
1042                    queue.push_back(dep);
1043                }
1044            }
1045        }
1046        result
1047    }
1048    #[allow(dead_code)]
1049    pub fn has_cycle(&self) -> bool {
1050        self.topological_sort().len() < self.nodes.len()
1051    }
1052}
/// Borrow analysis results for a variable.
#[derive(Debug, Clone, PartialEq)]
pub struct BorrowInfo {
    /// The variable being analyzed
    pub var: LcnfVarId,
    /// Whether this variable can be borrowed (vs owned)
    pub can_borrow: bool,
    /// The ownership status
    pub ownership: Ownership,
    /// Where this variable is last used
    /// (index semantics not defined in this module — see the analysis
    /// that fills it; `None` when unknown).
    pub last_use: Option<usize>,
    /// Whether this variable escapes the current scope
    pub escapes: bool,
    /// Which parameters this variable is derived from
    pub derived_from: Vec<LcnfVarId>,
}
1069impl BorrowInfo {
1070    pub(super) fn new(var: LcnfVarId) -> Self {
1071        BorrowInfo {
1072            var,
1073            can_borrow: false,
1074            ownership: Ownership::Unknown,
1075            last_use: None,
1076            escapes: false,
1077            derived_from: Vec::new(),
1078        }
1079    }
1080    pub(super) fn with_ownership(var: LcnfVarId, ownership: Ownership) -> Self {
1081        BorrowInfo {
1082            var,
1083            can_borrow: matches!(ownership, Ownership::Borrowed),
1084            ownership,
1085            last_use: None,
1086            escapes: false,
1087            derived_from: Vec::new(),
1088        }
1089    }
1090}
/// Reuse analysis emit stats
///
/// Counters gathered while emitting reuse-annotated output.
#[allow(dead_code)]
#[derive(Debug, Default, Clone)]
pub struct ReuseExtEmitStats {
    /// Total bytes written.
    pub bytes_written: usize,
    /// Number of items emitted.
    pub items_emitted: usize,
    /// Number of errors encountered.
    pub errors: usize,
    /// Number of warnings encountered.
    pub warnings: usize,
}
/// Allocation size class
///
/// Buckets allocations for reuse compatibility (`ReuseLiveRange::can_reuse`
/// requires an exact class match). Byte thresholds for the buckets are
/// not defined in this module — confirm at the classification site.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ReuseMemSizeClass {
    Tiny,
    Small,
    Medium,
    Large,
    Huge,
    /// Presumably a size not known statically — TODO confirm.
    Dynamic,
}
/// Reuse analysis global config
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct ReuseGlobalConfig {
    /// Upper bound for scratch allocations.
    pub max_scratch_size: u64,
    /// Enable interprocedural analysis.
    pub enable_ipa: bool,
    /// Enable coloring-based slot assignment.
    pub enable_coloring: bool,
    /// Enable linear-scan slot allocation.
    pub enable_linear_scan: bool,
    /// Maximum number of regions to analyze.
    pub max_regions: usize,
}
/// Per-allocation-site record: the site, its live range, and the
/// decision made for it.
#[allow(dead_code)]
pub struct ReuseAllocSiteInfo {
    /// The allocation site being described.
    pub site: AllocSite,
    /// Live range of the allocated value.
    pub live_range: ReuseLiveRange,
    /// Decision taken for this site.
    pub decision: ReuseDecision,
}
/// Reuse analysis feature flags
///
/// Fine-grained opt-ins for individual reuse strategies.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct ReuseFeatureFlags {
    /// Allow reusing constructor-allocated cells.
    pub reuse_constructor_allocated: bool,
    /// Allow reusing RC-singleton cells.
    pub reuse_rc_singleton: bool,
    /// Allow trivial reuse cases.
    pub reuse_trivial: bool,
    /// Inline small closures during the pass.
    pub inline_small_closures: bool,
}
/// An in-place update opportunity
///
/// Describes rewriting "copy of `source` with some fields changed,
/// bound to `result`" as a destructive field update — presumably valid
/// only when `source` is uniquely owned; confirm at the use site.
#[derive(Debug, Clone)]
pub struct InPlaceUpdate {
    /// The source variable (being updated)
    pub(super) source: LcnfVarId,
    /// The result variable (the updated value)
    pub(super) result: LcnfVarId,
    /// Which fields are changed (index -> new value)
    pub(super) changed_fields: Vec<(usize, LcnfArg)>,
}
/// Collector for diagnostics produced during reuse analysis.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseDiagSink {
    /// Diagnostics in the order they were pushed.
    pub diags: Vec<ReuseDiag>,
}
1151#[allow(dead_code)]
1152impl ReuseDiagSink {
1153    pub fn new() -> Self {
1154        Self::default()
1155    }
1156    pub fn push(&mut self, level: ReuseDiagLevel, msg: &str, var: Option<u32>) {
1157        self.diags.push(ReuseDiag {
1158            level,
1159            message: msg.to_string(),
1160            var,
1161        });
1162    }
1163    pub fn has_errors(&self) -> bool {
1164        self.diags.iter().any(|d| d.level == ReuseDiagLevel::Error)
1165    }
1166}
/// Configuration for reuse optimization
///
/// NOTE(review): unlike the other config types here, this does not
/// derive `Default`; confirm defaults are constructed elsewhere.
#[derive(Debug, Clone)]
pub struct ReuseConfig {
    /// Enable reset-reuse optimization
    pub enable_reset_reuse: bool,
    /// Enable borrow inference
    pub enable_borrow: bool,
    /// Enable reference counting elimination
    pub enable_rc_elim: bool,
    /// Enable in-place update optimization
    pub enable_in_place: bool,
    /// Maximum depth for ownership analysis
    pub analysis_depth: usize,
    /// Whether to track ownership through function calls
    pub interprocedural: bool,
}
/// Per-pass execution statistics: run counts, changes, and timing.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct ORPassStats {
    /// Total number of times the pass was invoked.
    pub total_runs: u32,
    /// Number of invocations counted as successful.
    pub successful_runs: u32,
    /// Cumulative number of changes across all runs.
    pub total_changes: u64,
    /// Cumulative wall-clock time in milliseconds.
    pub time_ms: u64,
    /// Iteration count of the most recent run (overwritten each run).
    pub iterations_used: u32,
}
impl ORPassStats {
    /// Zeroed statistics.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Record one run of the pass. Every recorded run counts as
    /// successful (this API has no failure path).
    #[allow(dead_code)]
    pub fn record_run(&mut self, changes: u64, time_ms: u64, iterations: u32) {
        self.total_runs += 1;
        self.successful_runs += 1;
        self.total_changes += changes;
        self.time_ms += time_ms;
        // Deliberately a snapshot of the latest run, not a sum.
        self.iterations_used = iterations;
    }
    /// Mean number of changes per run; 0.0 before any run.
    #[allow(dead_code)]
    pub fn average_changes_per_run(&self) -> f64 {
        if self.total_runs == 0 {
            0.0
        } else {
            self.total_changes as f64 / self.total_runs as f64
        }
    }
    /// Fraction of runs counted successful; 0.0 before any run.
    #[allow(dead_code)]
    pub fn success_rate(&self) -> f64 {
        if self.total_runs == 0 {
            0.0
        } else {
            self.successful_runs as f64 / self.total_runs as f64
        }
    }
    /// One-line human-readable summary.
    #[allow(dead_code)]
    pub fn format_summary(&self) -> String {
        format!(
            "Runs: {}/{}, Changes: {}, Time: {}ms",
            self.successful_runs, self.total_runs, self.total_changes, self.time_ms
        )
    }
}
/// Reuse analysis pass config (extended)
///
/// NOTE(review): `ReusePassBuilder` derives `Default` and contains this
/// type, so a `Default` impl for `ReuseConfigExt` must exist in another
/// module of this crate (it is not derived here) — confirm.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ReuseConfigExt {
    /// Enable cell reuse.
    pub enable_reuse: bool,
    /// Enable stack allocation.
    pub enable_stack_alloc: bool,
    /// Enable inlining.
    pub enable_inline: bool,
    /// Enable the shared scratch buffer.
    pub enable_scratch_buffer: bool,
    /// Maximum reuse distance considered.
    pub max_reuse_distance: usize,
    /// Cap on tracked live ranges.
    pub max_live_ranges: usize,
    /// Scratch buffer capacity.
    pub scratch_buffer_size: u64,
}
/// Kind of RC elimination
///
/// The shape of a reference-count elimination; `RcElimReason` records
/// why the elimination is sound.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RcElimKind {
    /// Remove an increment (the value is uniquely owned)
    SkipInc,
    /// Remove a decrement (the value is still live)
    SkipDec,
    /// Combine inc+dec into nothing
    CancelPair,
}
/// Growable text buffer used when emitting reuse-annotated source.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseExtSourceBuffer {
    /// Accumulated output text.
    pub content: String,
}
#[allow(dead_code)]
impl ReuseExtSourceBuffer {
    /// Empty buffer.
    pub fn new() -> Self {
        Self::default()
    }
    /// Append `s` verbatim.
    pub fn write(&mut self, s: &str) {
        self.content.push_str(s);
    }
    /// Append `s` followed by a newline.
    pub fn writeln(&mut self, s: &str) {
        self.write(s);
        self.content.push('\n');
    }
    /// Consume the buffer, yielding the accumulated text.
    pub fn finish(self) -> String {
        self.content
    }
}
/// Reuse analysis builder
///
/// Bundles config, per-variable decisions, stats, and diagnostics for
/// one run of the extended reuse pass. `derive(Default)` requires all
/// four field types to implement `Default`.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReusePassBuilder {
    /// Pass configuration.
    pub config: ReuseConfigExt,
    /// Per-variable reuse decisions.
    pub decisions: ReuseDecisionMap,
    /// Aggregate statistics (rendered by `report` via `Display`).
    pub stats: ReuseStatsExt,
    /// Collected diagnostics.
    pub diags: ReuseDiagSink,
}
1280#[allow(dead_code)]
1281impl ReusePassBuilder {
1282    pub fn new() -> Self {
1283        Self::default()
1284    }
1285    pub fn with_config(mut self, cfg: ReuseConfigExt) -> Self {
1286        self.config = cfg;
1287        self
1288    }
1289    pub fn report(&self) -> String {
1290        format!("{}", self.stats)
1291    }
1292}
/// First-fit slot allocator used to assign scratch-buffer slots to
/// variables during a linear scan.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseLinearScanAlloc {
    /// Slot pool: (capacity, currently-in-use flag).
    pub slots: Vec<(u64, bool)>,
    /// Live mapping from variable id to its slot index.
    pub assignment: std::collections::HashMap<u32, usize>,
}
#[allow(dead_code)]
impl ReuseLinearScanAlloc {
    /// Empty allocator with no slots.
    pub fn new() -> Self {
        Self::default()
    }
    /// Assign `var` a slot of at least `size`, reusing the first free
    /// slot that is large enough or appending a new one.
    /// Returns the slot index.
    pub fn alloc(&mut self, var: u32, size: u64) -> usize {
        // First fit: scan for a free slot with sufficient capacity.
        let found = self
            .slots
            .iter()
            .position(|&(cap, in_use)| !in_use && cap >= size);
        let index = match found {
            Some(i) => {
                self.slots[i].1 = true;
                i
            }
            None => {
                self.slots.push((size, true));
                self.slots.len() - 1
            }
        };
        self.assignment.insert(var, index);
        index
    }
    /// Release the slot held by `var`, if any, marking it free for reuse.
    pub fn free(&mut self, var: u32) {
        if let Some(slot) = self.assignment.remove(&var) {
            if let Some(entry) = self.slots.get_mut(slot) {
                entry.1 = false;
            }
        }
    }
    /// Number of slots currently marked in use.
    pub fn slots_used(&self) -> usize {
        self.slots.iter().filter(|&&(_, in_use)| in_use).count()
    }
    /// Total number of slots ever created.
    pub fn total_slots(&self) -> usize {
        self.slots.len()
    }
}
/// One entry in `ORAnalysisCache`: opaque payload plus bookkeeping.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ORCacheEntry {
    /// Cache key (duplicated from the map key).
    pub key: String,
    /// Cached payload bytes.
    pub data: Vec<u8>,
    /// Insertion timestamp. NOTE(review): always written as 0 by
    /// `ORAnalysisCache::insert`; confirm whether anything reads it.
    pub timestamp: u64,
    /// Cleared by `ORAnalysisCache::invalidate`; note that `get` does
    /// not check this flag.
    pub valid: bool,
}
/// Reuse allocation record
///
/// One allocation observed by the pass and what became of it.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ReuseAllocRecord {
    /// Variable id holding the allocation.
    pub var: u32,
    /// Kind of allocation.
    pub kind: ReuseAllocKind,
    /// Allocation size.
    pub size: u64,
    /// When reused, the variable whose cell was taken over.
    pub reused_from: Option<u32>,
}
/// Records per-pass timings (microseconds) for the reuse pipeline.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseExtProfiler {
    /// (pass name, elapsed microseconds) in recording order.
    pub timings: Vec<(String, u64)>,
}
#[allow(dead_code)]
impl ReuseExtProfiler {
    /// Profiler with no recorded timings.
    pub fn new() -> Self {
        Self::default()
    }
    /// Record that `pass` took `us` microseconds.
    pub fn record(&mut self, pass: &str, us: u64) {
        self.timings.push((pass.to_string(), us));
    }
    /// Sum of all recorded timings, in microseconds.
    pub fn total_us(&self) -> u64 {
        self.timings.iter().map(|&(_, t)| t).sum()
    }
}
/// FIFO worklist of `u32` items with O(1) duplicate suppression.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ORWorklist {
    /// Pending items in FIFO order.
    pub(super) items: std::collections::VecDeque<u32>,
    /// Set mirror of `items`, used to reject duplicate pushes.
    pub(super) in_worklist: std::collections::HashSet<u32>,
}
1373impl ORWorklist {
1374    #[allow(dead_code)]
1375    pub fn new() -> Self {
1376        ORWorklist {
1377            items: std::collections::VecDeque::new(),
1378            in_worklist: std::collections::HashSet::new(),
1379        }
1380    }
1381    #[allow(dead_code)]
1382    pub fn push(&mut self, item: u32) -> bool {
1383        if self.in_worklist.insert(item) {
1384            self.items.push_back(item);
1385            true
1386        } else {
1387            false
1388        }
1389    }
1390    #[allow(dead_code)]
1391    pub fn pop(&mut self) -> Option<u32> {
1392        let item = self.items.pop_front()?;
1393        self.in_worklist.remove(&item);
1394        Some(item)
1395    }
1396    #[allow(dead_code)]
1397    pub fn is_empty(&self) -> bool {
1398        self.items.is_empty()
1399    }
1400    #[allow(dead_code)]
1401    pub fn len(&self) -> usize {
1402        self.items.len()
1403    }
1404    #[allow(dead_code)]
1405    pub fn contains(&self, item: u32) -> bool {
1406        self.in_worklist.contains(&item)
1407    }
1408}
/// Graph-coloring-style assignment of variables to reuse "colors"
/// (slots), analogous to register allocation.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseColoring {
    /// Variable id -> assigned color.
    pub color: std::collections::HashMap<u32, u32>,
    /// One past the highest color ever assigned.
    pub num_colors: u32,
}
#[allow(dead_code)]
impl ReuseColoring {
    /// Coloring with no assignments.
    pub fn new() -> Self {
        Self::default()
    }
    /// Assign `color` to `var`, widening the color count if needed.
    pub fn assign(&mut self, var: u32, color: u32) {
        self.color.insert(var, color);
        if color + 1 > self.num_colors {
            self.num_colors = color + 1;
        }
    }
    /// Color assigned to `var`, if any.
    pub fn get(&self, var: u32) -> Option<u32> {
        self.color.get(&var).copied()
    }
    /// Number of colors in use (upper bound; colors are never reclaimed).
    pub fn colors_used(&self) -> u32 {
        self.num_colors
    }
    /// All variables assigned color `c`, in arbitrary (hash-map) order.
    pub fn vars_with_color(&self, c: u32) -> Vec<u32> {
        let mut vars = Vec::new();
        for (&var, &col) in self.color.iter() {
            if col == c {
                vars.push(var);
            }
        }
        vars
    }
}
/// Configuration for a single optimization pass.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ORPassConfig {
    /// Pipeline phase this pass belongs to.
    pub phase: ORPassPhase,
    /// Whether the pass runs (see `ORPassRegistry::enabled_passes`).
    pub enabled: bool,
    /// Iteration cap for fixpoint-style passes.
    pub max_iterations: u32,
    /// Emit debug output while running.
    pub debug_output: bool,
    /// Pass name; also the key into `ORPassRegistry::stats`.
    pub pass_name: String,
}
1448impl ORPassConfig {
1449    #[allow(dead_code)]
1450    pub fn new(name: impl Into<String>, phase: ORPassPhase) -> Self {
1451        ORPassConfig {
1452            phase,
1453            enabled: true,
1454            max_iterations: 10,
1455            debug_output: false,
1456            pass_name: name.into(),
1457        }
1458    }
1459    #[allow(dead_code)]
1460    pub fn disabled(mut self) -> Self {
1461        self.enabled = false;
1462        self
1463    }
1464    #[allow(dead_code)]
1465    pub fn with_debug(mut self) -> Self {
1466        self.debug_output = true;
1467        self
1468    }
1469    #[allow(dead_code)]
1470    pub fn max_iter(mut self, n: u32) -> Self {
1471        self.max_iterations = n;
1472        self
1473    }
1474}
/// Live range for reuse analysis
///
/// Closed interval [`def_point`, `last_use`] over a linear program
/// order; endpoints are inclusive (see `overlaps`).
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ReuseLiveRange {
    /// Variable this range belongs to.
    pub var: u32,
    /// Position where the variable is defined.
    pub def_point: usize,
    /// Position of the last use.
    pub last_use: usize,
    /// Size bucket; reuse requires an exact class match (`can_reuse`).
    pub size_class: ReuseMemSizeClass,
}
1484impl ReuseLiveRange {
1485    #[allow(dead_code)]
1486    pub fn overlaps(&self, other: &ReuseLiveRange) -> bool {
1487        !(self.last_use < other.def_point || other.last_use < self.def_point)
1488    }
1489    #[allow(dead_code)]
1490    pub fn can_reuse(&self, other: &ReuseLiveRange) -> bool {
1491        !self.overlaps(other) && self.size_class == other.size_class
1492    }
1493}
/// Reuse analysis code stats
///
/// Counters for outcomes of allocation-site analysis.
#[allow(dead_code)]
#[derive(Debug, Default, Clone)]
pub struct ReuseCodeStats {
    /// Allocation sites examined.
    pub allocs_analyzed: usize,
    /// Sites rewritten to reuse an existing cell.
    pub reuses: usize,
    /// Sites turned into stack allocations.
    pub stack_allocs: usize,
    /// Sites inlined.
    pub inlines: usize,
    /// Sites served from the scratch buffer.
    pub scratch_uses: usize,
    /// Estimated bytes saved.
    pub bytes_saved: u64,
}
/// Reason why an RC elimination is valid
///
/// Justification recorded alongside an `RcElimKind`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RcElimReason {
    /// Variable has unique ownership
    UniqueOwnership,
    /// Variable is borrowed (no ownership transfer)
    Borrowed,
    /// Inc and dec on same variable cancel out
    CancelledPair,
    /// Value is immediately consumed
    ImmediateConsume,
    /// Value is never used after this point
    DeadAfterPoint,
}
/// Memory layout information for a constructor
#[derive(Debug, Clone, PartialEq)]
pub struct LayoutInfo {
    /// Constructor name
    pub(super) ctor_name: String,
    /// Constructor tag
    pub(super) ctor_tag: u32,
    /// Number of object (pointer) fields
    pub(super) obj_fields: usize,
    /// Number of scalar fields
    pub(super) scalar_fields: usize,
    /// Total size in words (8 bytes each)
    /// Computed by `new` as `1 + obj_fields + scalar_fields`
    /// (one header word).
    pub(super) total_words: usize,
}
1533impl LayoutInfo {
1534    pub(super) fn new(
1535        ctor_name: &str,
1536        ctor_tag: u32,
1537        obj_fields: usize,
1538        scalar_fields: usize,
1539    ) -> Self {
1540        LayoutInfo {
1541            ctor_name: ctor_name.to_string(),
1542            ctor_tag,
1543            obj_fields,
1544            scalar_fields,
1545            total_words: 1 + obj_fields + scalar_fields,
1546        }
1547    }
1548    /// Check if another layout is compatible for reuse
1549    pub(super) fn is_compatible_with(&self, other: &LayoutInfo) -> bool {
1550        self.total_words == other.total_words
1551    }
1552    /// Check if another layout is a subset (smaller or equal)
1553    pub(super) fn fits_in(&self, other: &LayoutInfo) -> bool {
1554        self.total_words <= other.total_words
1555    }
1556}
/// A lifetime scope
///
/// Tracks the variables a lexical scope introduces and the variables it
/// borrows.
#[derive(Debug, Clone)]
pub(crate) struct LifetimeScope {
    /// Variables defined in this scope
    pub(super) defined: HashSet<LcnfVarId>,
    /// Variables borrowed in this scope
    pub(super) borrowed: HashSet<LcnfVarId>,
}
/// Reuse analysis builder v2
///
/// Like `ReusePassBuilder`, but driven by the global config plus the
/// interprocedural / coloring / liveness analyses.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReusePassBuilderV2 {
    /// Global pass configuration.
    pub config: ReuseGlobalConfig,
    /// Interprocedural roll-up (rendered by `ipa_report` via `Display`).
    pub ipa_summary: ReuseIPASummary,
    /// Slot coloring results.
    pub coloring: ReuseColoring,
    /// Flow-sensitive liveness results.
    pub liveness: ReuseLiveness,
    /// Aggregate statistics (rendered by `report` via `Display`).
    pub stats: ReuseCodeStats,
}
1575#[allow(dead_code)]
1576impl ReusePassBuilderV2 {
1577    pub fn new() -> Self {
1578        Self::default()
1579    }
1580    pub fn with_config(mut self, cfg: ReuseGlobalConfig) -> Self {
1581        self.config = cfg;
1582        self
1583    }
1584    pub fn report(&self) -> String {
1585        format!("{}", self.stats)
1586    }
1587    pub fn ipa_report(&self) -> String {
1588        format!("{}", self.ipa_summary)
1589    }
1590}
/// Registry of optimization passes and their execution statistics,
/// keyed by pass name.
#[allow(dead_code)]
pub struct ORPassRegistry {
    /// Registered pass configs, in registration order.
    pub(super) configs: Vec<ORPassConfig>,
    /// Per-pass stats, keyed by `ORPassConfig::pass_name`.
    pub(super) stats: std::collections::HashMap<String, ORPassStats>,
}
1596impl ORPassRegistry {
1597    #[allow(dead_code)]
1598    pub fn new() -> Self {
1599        ORPassRegistry {
1600            configs: Vec::new(),
1601            stats: std::collections::HashMap::new(),
1602        }
1603    }
1604    #[allow(dead_code)]
1605    pub fn register(&mut self, config: ORPassConfig) {
1606        self.stats
1607            .insert(config.pass_name.clone(), ORPassStats::new());
1608        self.configs.push(config);
1609    }
1610    #[allow(dead_code)]
1611    pub fn enabled_passes(&self) -> Vec<&ORPassConfig> {
1612        self.configs.iter().filter(|c| c.enabled).collect()
1613    }
1614    #[allow(dead_code)]
1615    pub fn get_stats(&self, name: &str) -> Option<&ORPassStats> {
1616        self.stats.get(name)
1617    }
1618    #[allow(dead_code)]
1619    pub fn total_passes(&self) -> usize {
1620        self.configs.len()
1621    }
1622    #[allow(dead_code)]
1623    pub fn enabled_count(&self) -> usize {
1624        self.enabled_passes().len()
1625    }
1626    #[allow(dead_code)]
1627    pub fn update_stats(&mut self, name: &str, changes: u64, time_ms: u64, iter: u32) {
1628        if let Some(stats) = self.stats.get_mut(name) {
1629            stats.record_run(changes, time_ms, iter);
1630        }
1631    }
1632}
/// Reuse analysis decision map
///
/// Per-variable reuse decisions plus running counters for the two
/// "interesting" outcomes (reuse and stack allocation).
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseDecisionMap {
    /// Decision per variable id.
    pub decisions: std::collections::HashMap<u32, ReuseDecision>,
    /// Times `set` recorded a `Reuse` decision (not deduplicated per var).
    pub reuse_count: usize,
    /// Times `set` recorded a `StackAlloc` decision (not deduplicated).
    pub stack_alloc_count: usize,
}
1641#[allow(dead_code)]
1642impl ReuseDecisionMap {
1643    pub fn new() -> Self {
1644        Self::default()
1645    }
1646    pub fn set(&mut self, var: u32, decision: ReuseDecision) {
1647        match &decision {
1648            ReuseDecision::Reuse(_) => self.reuse_count += 1,
1649            ReuseDecision::StackAlloc => self.stack_alloc_count += 1,
1650            _ => {}
1651        }
1652        self.decisions.insert(var, decision);
1653    }
1654    pub fn get(&self, var: u32) -> Option<&ReuseDecision> {
1655        self.decisions.get(&var)
1656    }
1657    pub fn total_decisions(&self) -> usize {
1658        self.decisions.len()
1659    }
1660    pub fn reuse_rate(&self) -> f64 {
1661        if self.decisions.is_empty() {
1662            0.0
1663        } else {
1664            self.reuse_count as f64 / self.decisions.len() as f64
1665        }
1666    }
1667}
/// Per-block live-variable sets from flow-sensitive liveness analysis
/// (block id -> set of live variable ids).
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseLiveness {
    /// Variables live on entry to each block.
    pub live_in: std::collections::HashMap<u32, std::collections::HashSet<u32>>,
    /// Variables live on exit from each block.
    pub live_out: std::collections::HashMap<u32, std::collections::HashSet<u32>>,
}
#[allow(dead_code)]
impl ReuseLiveness {
    /// Empty liveness tables.
    pub fn new() -> Self {
        Self::default()
    }
    /// True when `var` is live on entry to `block`.
    pub fn is_live_at(&self, block: u32, var: u32) -> bool {
        match self.live_in.get(&block) {
            Some(vars) => vars.contains(&var),
            None => false,
        }
    }
    /// Mark `var` live on entry to `block`.
    pub fn add_live_in(&mut self, block: u32, var: u32) {
        self.live_in.entry(block).or_default().insert(var);
    }
    /// Mark `var` live on exit from `block`.
    pub fn add_live_out(&mut self, block: u32, var: u32) {
        self.live_out.entry(block).or_default().insert(var);
    }
}
/// Reuse analysis id generator
///
/// Monotonic counter handing out fresh `u32` ids starting from 0.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseExtIdGen {
    /// Next id to hand out; advanced by `next`.
    pub(super) counter: u32,
}
1699#[allow(dead_code)]
1700impl ReuseExtIdGen {
1701    pub fn new() -> Self {
1702        Self::default()
1703    }
1704    pub fn next(&mut self) -> u32 {
1705        let id = self.counter;
1706        self.counter += 1;
1707        id
1708    }
1709}
/// Reuse candidate heap (priority queue by benefit)
///
/// NOTE(review): stored as a plain `Vec` of (benefit, candidate) pairs;
/// no heap operations are defined in this module — confirm ordering is
/// maintained by the code that uses it.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct ReuseCandidateHeap {
    /// (benefit score, hoist candidate) pairs.
    pub items: Vec<(i64, LicmHoistCandidate)>,
}
/// Bounded analysis-result cache with hit/miss accounting.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ORAnalysisCache {
    /// Cached entries keyed by analysis key.
    pub(super) entries: std::collections::HashMap<String, ORCacheEntry>,
    /// Capacity; `insert` evicts once this many entries exist.
    pub(super) max_size: usize,
    /// Lookups that found an entry.
    pub(super) hits: u64,
    /// Lookups that found nothing.
    pub(super) misses: u64,
}
1724impl ORAnalysisCache {
1725    #[allow(dead_code)]
1726    pub fn new(max_size: usize) -> Self {
1727        ORAnalysisCache {
1728            entries: std::collections::HashMap::new(),
1729            max_size,
1730            hits: 0,
1731            misses: 0,
1732        }
1733    }
1734    #[allow(dead_code)]
1735    pub fn get(&mut self, key: &str) -> Option<&ORCacheEntry> {
1736        if self.entries.contains_key(key) {
1737            self.hits += 1;
1738            self.entries.get(key)
1739        } else {
1740            self.misses += 1;
1741            None
1742        }
1743    }
1744    #[allow(dead_code)]
1745    pub fn insert(&mut self, key: String, data: Vec<u8>) {
1746        if self.entries.len() >= self.max_size {
1747            if let Some(oldest) = self.entries.keys().next().cloned() {
1748                self.entries.remove(&oldest);
1749            }
1750        }
1751        self.entries.insert(
1752            key.clone(),
1753            ORCacheEntry {
1754                key,
1755                data,
1756                timestamp: 0,
1757                valid: true,
1758            },
1759        );
1760    }
1761    #[allow(dead_code)]
1762    pub fn invalidate(&mut self, key: &str) {
1763        if let Some(entry) = self.entries.get_mut(key) {
1764            entry.valid = false;
1765        }
1766    }
1767    #[allow(dead_code)]
1768    pub fn clear(&mut self) {
1769        self.entries.clear();
1770    }
1771    #[allow(dead_code)]
1772    pub fn hit_rate(&self) -> f64 {
1773        let total = self.hits + self.misses;
1774        if total == 0 {
1775            return 0.0;
1776        }
1777        self.hits as f64 / total as f64
1778    }
1779    #[allow(dead_code)]
1780    pub fn size(&self) -> usize {
1781        self.entries.len()
1782    }
1783}
/// Dominator tree over basic blocks: immediate dominators, children,
/// and depths, all indexed by block number.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ORDominatorTree {
    /// Immediate dominator of each node (`None` when unset).
    pub idom: Vec<Option<u32>>,
    /// Children of each node in the dominator tree.
    pub dom_children: Vec<Vec<u32>>,
    /// Depth of each node in the dominator tree.
    pub dom_depth: Vec<u32>,
}
impl ORDominatorTree {
    /// Tree for `size` nodes with no dominator edges yet.
    #[allow(dead_code)]
    pub fn new(size: usize) -> Self {
        ORDominatorTree {
            idom: vec![None; size],
            dom_children: vec![Vec::new(); size],
            dom_depth: vec![0; size],
        }
    }
    /// Set the immediate dominator of `node`.
    /// Panics when `node` is out of range.
    #[allow(dead_code)]
    pub fn set_idom(&mut self, node: usize, idom: u32) {
        self.idom[node] = Some(idom);
    }
    /// True when `a` dominates `b` (every node dominates itself).
    ///
    /// Walks the idom chain upward from `b`. A self-loop in the chain
    /// (a node that is its own idom) terminates the walk, as does a
    /// node with no idom set.
    #[allow(dead_code)]
    pub fn dominates(&self, a: usize, b: usize) -> bool {
        if a == b {
            return true;
        }
        let mut cur = b;
        while let Some(parent) = self.idom[cur] {
            let parent = parent as usize;
            if parent == a {
                return true;
            }
            if parent == cur {
                // Reached a self-rooted node without meeting `a`.
                return false;
            }
            cur = parent;
        }
        false
    }
    /// Depth of `node` in the tree, or 0 when out of range.
    #[allow(dead_code)]
    pub fn depth(&self, node: usize) -> u32 {
        self.dom_depth.get(node).copied().unwrap_or(0)
    }
}
/// Reuse decision
///
/// Outcome chosen for a single allocation site. Payload semantics
/// (variable ids vs. slot indices) are not pinned down in this module —
/// confirm at the decision sites.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ReuseDecision {
    /// Reuse an existing cell (payload identifies the source).
    Reuse(u32),
    /// Allocate fresh from the heap.
    NewAlloc,
    /// Share via a reference-count bump.
    RcBump(u32),
    /// Allocate on the stack.
    StackAlloc,
    /// Inline the value instead of allocating.
    Inline,
    /// Serve from the scratch buffer.
    ScratchBuffer(u32),
}