1use crate::bytecode::{Register, Value};
2use crate::jit;
3use crate::jit::trace::{Trace, TraceOp};
4use alloc::{format, string::ToString, vec::Vec};
5use hashbrown::HashSet;
/// Rewrites a recorded JIT trace before compilation: hoists loop-invariant
/// constant loads, unrolls the loop body, and fuses arithmetic ops with the
/// `Move`s that copy their results.
pub struct TraceOptimizer {
    // (dest register, constant value) pairs pulled out of the trace body by
    // `hoist_constants`; returned from `optimize` so the caller can
    // materialize them once before entering the compiled loop.
    hoisted_constants: Vec<(Register, Value)>,
}
9
10impl TraceOptimizer {
11 pub fn new() -> Self {
12 Self {
13 hoisted_constants: Vec::new(),
14 }
15 }
16
17 pub fn optimize(&mut self, trace: &mut Trace) -> Vec<(Register, Value)> {
18 jit::log(|| "🔧 JIT Optimizer: Starting optimization...".to_string());
19 let original_ops = trace.ops.len();
20 self.hoist_constants(trace);
21 self.unroll_loop(trace, crate::jit::UNROLL_FACTOR);
22 self.eliminate_arithmetic_moves(trace);
23 self.coalesce_registers(trace);
24 let optimized_ops = trace.ops.len();
25 let hoisted = self.hoisted_constants.len();
26 jit::log(|| {
27 format!(
28 "✨ JIT Optimizer: Optimized {} ops → {} ops, hoisted {} constants",
29 original_ops, optimized_ops, hoisted
30 )
31 });
32 self.hoisted_constants.clone()
33 }
34
35 fn hoist_constants(&mut self, trace: &mut Trace) {
36 let mut non_hoistable: HashSet<Register> = HashSet::new();
37 for op in &trace.ops {
38 if let TraceOp::CallNative { callee, .. } = op {
39 non_hoistable.insert(*callee);
40 }
41 }
42
43 let mut new_ops = Vec::new();
44 let mut already_hoisted: HashSet<Register> = HashSet::new();
45 let mut assigned: HashSet<Register> = HashSet::new();
46
47 let dest_of = |op: &TraceOp| -> Option<Register> {
48 match op {
49 TraceOp::Move { dest, .. }
50 | TraceOp::Add { dest, .. }
51 | TraceOp::Sub { dest, .. }
52 | TraceOp::Mul { dest, .. }
53 | TraceOp::Div { dest, .. }
54 | TraceOp::Mod { dest, .. }
55 | TraceOp::Neg { dest, .. }
56 | TraceOp::Eq { dest, .. }
57 | TraceOp::Ne { dest, .. }
58 | TraceOp::Lt { dest, .. }
59 | TraceOp::Le { dest, .. }
60 | TraceOp::Gt { dest, .. }
61 | TraceOp::Ge { dest, .. }
62 | TraceOp::And { dest, .. }
63 | TraceOp::Or { dest, .. }
64 | TraceOp::Not { dest, .. }
65 | TraceOp::Concat { dest, .. }
66 | TraceOp::GetIndex { dest, .. }
67 | TraceOp::ArrayLen { dest, .. }
68 | TraceOp::CallMethod { dest, .. }
69 | TraceOp::GetField { dest, .. }
70 | TraceOp::NewStruct { dest, .. }
71 | TraceOp::CallNative { dest, .. } => Some(*dest),
72 _ => None,
73 }
74 };
75
76 for op in trace.ops.drain(..) {
77 match op {
78 TraceOp::LoadConst { dest, value } => {
79 if non_hoistable.contains(&dest)
80 || already_hoisted.contains(&dest)
81 || assigned.contains(&dest)
82 {
83 new_ops.push(TraceOp::LoadConst { dest, value });
84 } else {
85 self.hoisted_constants.push((dest, value));
86 already_hoisted.insert(dest);
87 }
88 assigned.insert(dest);
89 }
90
91 other => {
92 if let Some(dest) = dest_of(&other) {
93 assigned.insert(dest);
94 }
95 new_ops.push(other);
96 }
97 }
98 }
99
100 trace.ops = new_ops;
101 }
102
103 fn eliminate_arithmetic_moves(&mut self, trace: &mut Trace) {
104 let mut new_ops = Vec::new();
105 let mut i = 0;
106 while i < trace.ops.len() {
107 if i + 1 < trace.ops.len() {
108 let current = &trace.ops[i];
109 let next = &trace.ops[i + 1];
110 if let Some((_, final_dest)) = self.match_arithmetic_move(current, next) {
111 let mut rewritten = current.clone();
112 self.rewrite_arithmetic_dest(&mut rewritten, final_dest);
113 new_ops.push(rewritten);
114 i += 2;
115 continue;
116 }
117 }
118
119 new_ops.push(trace.ops[i].clone());
120 i += 1;
121 }
122
123 trace.ops = new_ops;
124 }
125
126 fn match_arithmetic_move(&self, op1: &TraceOp, op2: &TraceOp) -> Option<(Register, Register)> {
127 let arith_dest = match op1 {
128 TraceOp::Add { dest, .. }
129 | TraceOp::Sub { dest, .. }
130 | TraceOp::Mul { dest, .. }
131 | TraceOp::Div { dest, .. }
132 | TraceOp::Mod { dest, .. } => *dest,
133 _ => return None,
134 };
135 if let TraceOp::Move {
136 dest: move_dest,
137 src,
138 } = op2
139 {
140 if *src == arith_dest {
141 return Some((arith_dest, *move_dest));
142 }
143 }
144
145 None
146 }
147
148 fn rewrite_arithmetic_dest(&self, op: &mut TraceOp, new_dest: Register) {
149 match op {
150 TraceOp::Add { dest, .. }
151 | TraceOp::Sub { dest, .. }
152 | TraceOp::Mul { dest, .. }
153 | TraceOp::Div { dest, .. }
154 | TraceOp::Mod { dest, .. } => {
155 *dest = new_dest;
156 }
157
158 _ => {}
159 }
160 }
161
162 fn unroll_loop(&mut self, trace: &mut Trace, factor: usize) {
163 if factor <= 1 || trace.ops.is_empty() {
164 return;
165 }
166
167 let loop_condition_op = trace.ops.iter().find_map(|op| match op {
168 TraceOp::Le { dest, .. }
169 | TraceOp::Lt { dest, .. }
170 | TraceOp::Ge { dest, .. }
171 | TraceOp::Gt { dest, .. } => Some((op.clone(), *dest)),
172 _ => None,
173 });
174 if loop_condition_op.is_none() {
175 return;
176 }
177
178 let (loop_cmp_op, cond_reg) = loop_condition_op.unwrap();
179 let original_ops = trace.ops.clone();
180 let mut new_ops = Vec::new();
181 new_ops.extend(original_ops.iter().cloned());
182 for _ in 1..factor {
183 new_ops.push(loop_cmp_op.clone());
184 new_ops.push(TraceOp::GuardLoopContinue {
185 condition_register: cond_reg,
186 bailout_ip: trace.start_ip,
187 });
188 for op in &original_ops {
189 if !matches!(
190 op,
191 TraceOp::Le { .. }
192 | TraceOp::Lt { .. }
193 | TraceOp::Ge { .. }
194 | TraceOp::Gt { .. }
195 ) {
196 new_ops.push(op.clone());
197 }
198 }
199 }
200
201 trace.ops = new_ops;
202 }
203
204 fn coalesce_registers(&mut self, _trace: &mut Trace) {}
205}
206
207impl Default for TraceOptimizer {
208 fn default() -> Self {
209 Self::new()
210 }
211}