1use crate::bytecode::{Register, Value};
2use crate::jit;
3use crate::jit::trace::{Trace, TraceOp};
4use alloc::{format, string::ToString, vec::Vec};
5use hashbrown::HashSet;
/// Post-recording optimizer for JIT traces.
///
/// Runs a small pipeline of passes over a recorded [`Trace`]: constant
/// hoisting, loop unrolling, arithmetic/move fusion, and (currently stubbed)
/// register coalescing.
pub struct TraceOptimizer {
    // Constants lifted out of the trace body, in hoisting order. `optimize`
    // returns a copy of this list so the caller can materialize the loads
    // once before entering the compiled loop.
    hoisted_constants: Vec<(Register, Value)>,
}
9
10impl TraceOptimizer {
11 pub fn new() -> Self {
12 Self {
13 hoisted_constants: Vec::new(),
14 }
15 }
16
17 pub fn optimize(&mut self, trace: &mut Trace) -> Vec<(Register, Value)> {
18 jit::log(|| "🔧 JIT Optimizer: Starting optimization...".to_string());
19 let original_ops = trace.ops.len();
20 self.hoist_constants(trace);
21 self.unroll_loop(trace, crate::jit::UNROLL_FACTOR);
22 self.eliminate_arithmetic_moves(trace);
23 self.coalesce_registers(trace);
24 let optimized_ops = trace.ops.len();
25 let hoisted = self.hoisted_constants.len();
26 jit::log(|| {
27 format!(
28 "✨ JIT Optimizer: Optimized {} ops → {} ops, hoisted {} constants",
29 original_ops, optimized_ops, hoisted
30 )
31 });
32 self.hoisted_constants.clone()
33 }
34
35 fn hoist_constants(&mut self, trace: &mut Trace) {
36 let mut non_hoistable: HashSet<Register> = HashSet::new();
37 for op in &trace.ops {
38 match op {
39 TraceOp::CallNative { callee, .. }
40 | TraceOp::CallFunction { callee, .. }
41 | TraceOp::InlineCall { callee, .. } => {
42 non_hoistable.insert(*callee);
43 }
44 _ => {}
45 }
46 }
47
48 let mut new_ops = Vec::new();
49 let mut already_hoisted: HashSet<Register> = HashSet::new();
50 let mut assigned: HashSet<Register> = HashSet::new();
51
52 let dest_of = |op: &TraceOp| -> Option<Register> {
53 match op {
54 TraceOp::Move { dest, .. }
55 | TraceOp::Add { dest, .. }
56 | TraceOp::Sub { dest, .. }
57 | TraceOp::Mul { dest, .. }
58 | TraceOp::Div { dest, .. }
59 | TraceOp::Mod { dest, .. }
60 | TraceOp::Neg { dest, .. }
61 | TraceOp::Eq { dest, .. }
62 | TraceOp::Ne { dest, .. }
63 | TraceOp::Lt { dest, .. }
64 | TraceOp::Le { dest, .. }
65 | TraceOp::Gt { dest, .. }
66 | TraceOp::Ge { dest, .. }
67 | TraceOp::And { dest, .. }
68 | TraceOp::Or { dest, .. }
69 | TraceOp::Not { dest, .. }
70 | TraceOp::Concat { dest, .. }
71 | TraceOp::GetIndex { dest, .. }
72 | TraceOp::ArrayLen { dest, .. }
73 | TraceOp::CallMethod { dest, .. }
74 | TraceOp::GetField { dest, .. }
75 | TraceOp::NewArray { dest, .. }
76 | TraceOp::NewStruct { dest, .. }
77 | TraceOp::NewEnumUnit { dest, .. }
78 | TraceOp::NewEnumVariant { dest, .. }
79 | TraceOp::IsEnumVariant { dest, .. }
80 | TraceOp::GetEnumValue { dest, .. }
81 | TraceOp::CallNative { dest, .. }
82 | TraceOp::CallFunction { dest, .. }
83 | TraceOp::InlineCall { dest, .. } => Some(*dest),
84 _ => None,
85 }
86 };
87
88 for op in trace.ops.drain(..) {
89 match op {
90 TraceOp::LoadConst { dest, value } => {
91 if non_hoistable.contains(&dest)
92 || already_hoisted.contains(&dest)
93 || assigned.contains(&dest)
94 {
95 new_ops.push(TraceOp::LoadConst { dest, value });
96 } else {
97 self.hoisted_constants.push((dest, value));
98 already_hoisted.insert(dest);
99 }
100 assigned.insert(dest);
101 }
102
103 other => {
104 if let Some(dest) = dest_of(&other) {
105 assigned.insert(dest);
106 }
107 new_ops.push(other);
108 }
109 }
110 }
111
112 trace.ops = new_ops;
113 }
114
115 fn eliminate_arithmetic_moves(&mut self, trace: &mut Trace) {
116 let mut new_ops = Vec::new();
117 let mut i = 0;
118 while i < trace.ops.len() {
119 if i + 1 < trace.ops.len() {
120 let current = &trace.ops[i];
121 let next = &trace.ops[i + 1];
122 if let Some((_, final_dest)) = self.match_arithmetic_move(current, next) {
123 let mut rewritten = current.clone();
124 self.rewrite_arithmetic_dest(&mut rewritten, final_dest);
125 new_ops.push(rewritten);
126 i += 2;
127 continue;
128 }
129 }
130
131 new_ops.push(trace.ops[i].clone());
132 i += 1;
133 }
134
135 trace.ops = new_ops;
136 }
137
138 fn match_arithmetic_move(&self, op1: &TraceOp, op2: &TraceOp) -> Option<(Register, Register)> {
139 let arith_dest = match op1 {
140 TraceOp::Add { dest, .. }
141 | TraceOp::Sub { dest, .. }
142 | TraceOp::Mul { dest, .. }
143 | TraceOp::Div { dest, .. }
144 | TraceOp::Mod { dest, .. } => *dest,
145 _ => return None,
146 };
147 if let TraceOp::Move {
148 dest: move_dest,
149 src,
150 } = op2
151 {
152 if *src == arith_dest {
153 return Some((arith_dest, *move_dest));
154 }
155 }
156
157 None
158 }
159
160 fn rewrite_arithmetic_dest(&self, op: &mut TraceOp, new_dest: Register) {
161 match op {
162 TraceOp::Add { dest, .. }
163 | TraceOp::Sub { dest, .. }
164 | TraceOp::Mul { dest, .. }
165 | TraceOp::Div { dest, .. }
166 | TraceOp::Mod { dest, .. } => {
167 *dest = new_dest;
168 }
169
170 _ => {}
171 }
172 }
173
174 fn unroll_loop(&mut self, trace: &mut Trace, factor: usize) {
175 if factor <= 1 || trace.ops.is_empty() {
176 return;
177 }
178
179 if trace
180 .ops
181 .iter()
182 .any(|op| matches!(op, TraceOp::InlineCall { .. }))
183 {
184 return;
185 }
186
187 let loop_condition_op = trace.ops.iter().find_map(|op| match op {
188 TraceOp::Le { dest, .. }
189 | TraceOp::Lt { dest, .. }
190 | TraceOp::Ge { dest, .. }
191 | TraceOp::Gt { dest, .. } => Some((op.clone(), *dest)),
192 _ => None,
193 });
194 if loop_condition_op.is_none() {
195 return;
196 }
197
198 let (loop_cmp_op, cond_reg) = loop_condition_op.unwrap();
199 let original_ops = trace.ops.clone();
200 let mut new_ops = Vec::new();
201 new_ops.extend(original_ops.iter().cloned());
202 for _ in 1..factor {
203 new_ops.push(loop_cmp_op.clone());
204 new_ops.push(TraceOp::GuardLoopContinue {
205 condition_register: cond_reg,
206 expect_truthy: true,
207 bailout_ip: trace.start_ip,
208 });
209 for op in &original_ops {
210 if !matches!(
211 op,
212 TraceOp::Le { .. }
213 | TraceOp::Lt { .. }
214 | TraceOp::Ge { .. }
215 | TraceOp::Gt { .. }
216 ) {
217 new_ops.push(op.clone());
218 }
219 }
220 }
221
222 trace.ops = new_ops;
223 }
224
225 fn coalesce_registers(&mut self, _trace: &mut Trace) {}
226}
227
228impl Default for TraceOptimizer {
229 fn default() -> Self {
230 Self::new()
231 }
232}