1use crate::bytecode::{Register, Value};
2use crate::jit;
3use crate::jit::trace::{Trace, TraceOp};
4use alloc::{format, string::ToString, vec::Vec};
5use hashbrown::HashSet;
/// Peephole-style optimizer for recorded JIT traces: hoists loop-invariant
/// constant loads, unrolls the loop body, and fuses arithmetic/move pairs.
pub struct TraceOptimizer {
    // Constant loads lifted out of the trace body during `hoist_constants`.
    // Returned to the caller by `optimize` so the JIT can materialize them
    // once in a preamble instead of on every loop iteration.
    hoisted_constants: Vec<(Register, Value)>,
}
9
10impl TraceOptimizer {
11 pub fn new() -> Self {
12 Self {
13 hoisted_constants: Vec::new(),
14 }
15 }
16
17 pub fn optimize(&mut self, trace: &mut Trace) -> Vec<(Register, Value)> {
18 jit::log(|| "🔧 JIT Optimizer: Starting optimization...".to_string());
19 let original_ops = trace.ops.len();
20 self.hoist_constants(trace);
21 self.unroll_loop(trace, crate::jit::UNROLL_FACTOR);
22 self.eliminate_arithmetic_moves(trace);
23 self.coalesce_registers(trace);
24 let optimized_ops = trace.ops.len();
25 let hoisted = self.hoisted_constants.len();
26 jit::log(|| {
27 format!(
28 "✨ JIT Optimizer: Optimized {} ops → {} ops, hoisted {} constants",
29 original_ops, optimized_ops, hoisted
30 )
31 });
32 self.hoisted_constants.clone()
33 }
34
35 fn hoist_constants(&mut self, trace: &mut Trace) {
36 let mut non_hoistable: HashSet<Register> = HashSet::new();
37 for op in &trace.ops {
38 match op {
39 TraceOp::CallNative { callee, .. }
40 | TraceOp::CallFunction { callee, .. }
41 | TraceOp::InlineCall { callee, .. } => {
42 non_hoistable.insert(*callee);
43 }
44 _ => {}
45 }
46 }
47
48 let mut new_ops = Vec::new();
49 let mut already_hoisted: HashSet<Register> = HashSet::new();
50 let mut assigned: HashSet<Register> = HashSet::new();
51
52 let dest_of = |op: &TraceOp| -> Option<Register> {
53 match op {
54 TraceOp::Move { dest, .. }
55 | TraceOp::Add { dest, .. }
56 | TraceOp::Sub { dest, .. }
57 | TraceOp::Mul { dest, .. }
58 | TraceOp::Div { dest, .. }
59 | TraceOp::Mod { dest, .. }
60 | TraceOp::Neg { dest, .. }
61 | TraceOp::Eq { dest, .. }
62 | TraceOp::Ne { dest, .. }
63 | TraceOp::Lt { dest, .. }
64 | TraceOp::Le { dest, .. }
65 | TraceOp::Gt { dest, .. }
66 | TraceOp::Ge { dest, .. }
67 | TraceOp::And { dest, .. }
68 | TraceOp::Or { dest, .. }
69 | TraceOp::Not { dest, .. }
70 | TraceOp::Concat { dest, .. }
71 | TraceOp::GetIndex { dest, .. }
72 | TraceOp::ArrayLen { dest, .. }
73 | TraceOp::CallMethod { dest, .. }
74 | TraceOp::GetField { dest, .. }
75 | TraceOp::NewStruct { dest, .. }
76 | TraceOp::NewEnumUnit { dest, .. }
77 | TraceOp::NewEnumVariant { dest, .. }
78 | TraceOp::IsEnumVariant { dest, .. }
79 | TraceOp::GetEnumValue { dest, .. }
80 | TraceOp::CallNative { dest, .. }
81 | TraceOp::CallFunction { dest, .. }
82 | TraceOp::InlineCall { dest, .. } => Some(*dest),
83 _ => None,
84 }
85 };
86
87 for op in trace.ops.drain(..) {
88 match op {
89 TraceOp::LoadConst { dest, value } => {
90 if non_hoistable.contains(&dest)
91 || already_hoisted.contains(&dest)
92 || assigned.contains(&dest)
93 {
94 new_ops.push(TraceOp::LoadConst { dest, value });
95 } else {
96 self.hoisted_constants.push((dest, value));
97 already_hoisted.insert(dest);
98 }
99 assigned.insert(dest);
100 }
101
102 other => {
103 if let Some(dest) = dest_of(&other) {
104 assigned.insert(dest);
105 }
106 new_ops.push(other);
107 }
108 }
109 }
110
111 trace.ops = new_ops;
112 }
113
114 fn eliminate_arithmetic_moves(&mut self, trace: &mut Trace) {
115 let mut new_ops = Vec::new();
116 let mut i = 0;
117 while i < trace.ops.len() {
118 if i + 1 < trace.ops.len() {
119 let current = &trace.ops[i];
120 let next = &trace.ops[i + 1];
121 if let Some((_, final_dest)) = self.match_arithmetic_move(current, next) {
122 let mut rewritten = current.clone();
123 self.rewrite_arithmetic_dest(&mut rewritten, final_dest);
124 new_ops.push(rewritten);
125 i += 2;
126 continue;
127 }
128 }
129
130 new_ops.push(trace.ops[i].clone());
131 i += 1;
132 }
133
134 trace.ops = new_ops;
135 }
136
137 fn match_arithmetic_move(&self, op1: &TraceOp, op2: &TraceOp) -> Option<(Register, Register)> {
138 let arith_dest = match op1 {
139 TraceOp::Add { dest, .. }
140 | TraceOp::Sub { dest, .. }
141 | TraceOp::Mul { dest, .. }
142 | TraceOp::Div { dest, .. }
143 | TraceOp::Mod { dest, .. } => *dest,
144 _ => return None,
145 };
146 if let TraceOp::Move {
147 dest: move_dest,
148 src,
149 } = op2
150 {
151 if *src == arith_dest {
152 return Some((arith_dest, *move_dest));
153 }
154 }
155
156 None
157 }
158
159 fn rewrite_arithmetic_dest(&self, op: &mut TraceOp, new_dest: Register) {
160 match op {
161 TraceOp::Add { dest, .. }
162 | TraceOp::Sub { dest, .. }
163 | TraceOp::Mul { dest, .. }
164 | TraceOp::Div { dest, .. }
165 | TraceOp::Mod { dest, .. } => {
166 *dest = new_dest;
167 }
168
169 _ => {}
170 }
171 }
172
173 fn unroll_loop(&mut self, trace: &mut Trace, factor: usize) {
174 if factor <= 1 || trace.ops.is_empty() {
175 return;
176 }
177
178 if trace
179 .ops
180 .iter()
181 .any(|op| matches!(op, TraceOp::InlineCall { .. }))
182 {
183 return;
184 }
185
186 let loop_condition_op = trace.ops.iter().find_map(|op| match op {
187 TraceOp::Le { dest, .. }
188 | TraceOp::Lt { dest, .. }
189 | TraceOp::Ge { dest, .. }
190 | TraceOp::Gt { dest, .. } => Some((op.clone(), *dest)),
191 _ => None,
192 });
193 if loop_condition_op.is_none() {
194 return;
195 }
196
197 let (loop_cmp_op, cond_reg) = loop_condition_op.unwrap();
198 let original_ops = trace.ops.clone();
199 let mut new_ops = Vec::new();
200 new_ops.extend(original_ops.iter().cloned());
201 for _ in 1..factor {
202 new_ops.push(loop_cmp_op.clone());
203 new_ops.push(TraceOp::GuardLoopContinue {
204 condition_register: cond_reg,
205 expect_truthy: true,
206 bailout_ip: trace.start_ip,
207 });
208 for op in &original_ops {
209 if !matches!(
210 op,
211 TraceOp::Le { .. }
212 | TraceOp::Lt { .. }
213 | TraceOp::Ge { .. }
214 | TraceOp::Gt { .. }
215 ) {
216 new_ops.push(op.clone());
217 }
218 }
219 }
220
221 trace.ops = new_ops;
222 }
223
224 fn coalesce_registers(&mut self, _trace: &mut Trace) {}
225}
226
227impl Default for TraceOptimizer {
228 fn default() -> Self {
229 Self::new()
230 }
231}