1use crate::lcnf::*;
6use std::collections::HashMap;
7
8use super::functions::*;
9use std::collections::{HashSet, VecDeque};
10
#[allow(dead_code)]
/// Per-basic-block liveness sets backed by small de-duplicated `Vec`s.
#[derive(Debug, Clone, Default)]
pub struct SRExtLiveness {
    pub live_in: Vec<Vec<usize>>,
    pub live_out: Vec<Vec<usize>>,
    pub defs: Vec<Vec<usize>>,
    pub uses: Vec<Vec<usize>>,
}
impl SRExtLiveness {
    /// Empty liveness info for `n` blocks.
    #[allow(dead_code)]
    pub fn new(n: usize) -> Self {
        let table = || vec![Vec::new(); n];
        Self {
            live_in: table(),
            live_out: table(),
            defs: table(),
            uses: table(),
        }
    }
    /// True when `v` is live on entry to block `b` (false if `b` is out of range).
    #[allow(dead_code)]
    pub fn live_in(&self, b: usize, v: usize) -> bool {
        matches!(self.live_in.get(b), Some(set) if set.contains(&v))
    }
    /// True when `v` is live on exit from block `b`.
    #[allow(dead_code)]
    pub fn live_out(&self, b: usize, v: usize) -> bool {
        matches!(self.live_out.get(b), Some(set) if set.contains(&v))
    }
    /// Records a definition of `v` in block `b` (idempotent; out-of-range ignored).
    #[allow(dead_code)]
    pub fn add_def(&mut self, b: usize, v: usize) {
        match self.defs.get_mut(b) {
            Some(set) if !set.contains(&v) => set.push(v),
            _ => {}
        }
    }
    /// Records a use of `v` in block `b` (idempotent; out-of-range ignored).
    #[allow(dead_code)]
    pub fn add_use(&mut self, b: usize, v: usize) {
        match self.uses.get_mut(b) {
            Some(set) if !set.contains(&v) => set.push(v),
            _ => {}
        }
    }
    /// True when block `b` records a use of `v`.
    #[allow(dead_code)]
    pub fn var_is_used_in_block(&self, b: usize, v: usize) -> bool {
        matches!(self.uses.get(b), Some(set) if set.contains(&v))
    }
    /// True when block `b` records a definition of `v`.
    #[allow(dead_code)]
    pub fn var_is_def_in_block(&self, b: usize, v: usize) -> bool {
        matches!(self.defs.get(b), Some(set) if set.contains(&v))
    }
}
#[allow(dead_code)]
/// Stateless constant-folding primitives. Integer ops use checked arithmetic
/// and return `None` on overflow or undefined cases (division by zero,
/// `i64::MIN / -1`, shift amounts >= 64).
pub struct OSConstantFoldingHelper;
impl OSConstantFoldingHelper {
    #[allow(dead_code)]
    pub fn fold_add_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_add(b)
    }
    #[allow(dead_code)]
    pub fn fold_sub_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_sub(b)
    }
    #[allow(dead_code)]
    pub fn fold_mul_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_mul(b)
    }
    /// Division: `None` for `b == 0` and for the overflowing `i64::MIN / -1`.
    /// (`checked_div` covers both; the previous explicit zero test was redundant.)
    #[allow(dead_code)]
    pub fn fold_div_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_div(b)
    }
    /// Float folds follow IEEE-754 semantics; there is no failure case.
    #[allow(dead_code)]
    pub fn fold_add_f64(a: f64, b: f64) -> f64 {
        a + b
    }
    #[allow(dead_code)]
    pub fn fold_mul_f64(a: f64, b: f64) -> f64 {
        a * b
    }
    /// Negation: `None` only for `i64::MIN`.
    #[allow(dead_code)]
    pub fn fold_neg_i64(a: i64) -> Option<i64> {
        a.checked_neg()
    }
    #[allow(dead_code)]
    pub fn fold_not_bool(a: bool) -> bool {
        !a
    }
    #[allow(dead_code)]
    pub fn fold_and_bool(a: bool, b: bool) -> bool {
        a && b
    }
    #[allow(dead_code)]
    pub fn fold_or_bool(a: bool, b: bool) -> bool {
        a || b
    }
    /// Shifts: `None` when the shift amount is >= 64.
    #[allow(dead_code)]
    pub fn fold_shl_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shl(b)
    }
    #[allow(dead_code)]
    pub fn fold_shr_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shr(b)
    }
    /// Remainder: `None` for `b == 0` and for `i64::MIN % -1`.
    ///
    /// Fixed: the previous `a % b` panicked on the `i64::MIN % -1` overflow
    /// case; `checked_rem` reports it as `None` instead.
    #[allow(dead_code)]
    pub fn fold_rem_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_rem(b)
    }
    /// Bitwise operations cannot fail.
    #[allow(dead_code)]
    pub fn fold_bitand_i64(a: i64, b: i64) -> i64 {
        a & b
    }
    #[allow(dead_code)]
    pub fn fold_bitor_i64(a: i64, b: i64) -> i64 {
        a | b
    }
    #[allow(dead_code)]
    pub fn fold_bitxor_i64(a: i64, b: i64) -> i64 {
        a ^ b
    }
    #[allow(dead_code)]
    pub fn fold_bitnot_i64(a: i64) -> i64 {
        !a
    }
}
146#[allow(dead_code)]
148#[derive(Debug, Clone)]
149pub struct SRExtDomTree {
150 pub(super) idom: Vec<Option<usize>>,
151 pub(super) children: Vec<Vec<usize>>,
152 pub(super) depth: Vec<usize>,
153}
154impl SRExtDomTree {
155 #[allow(dead_code)]
156 pub fn new(n: usize) -> Self {
157 Self {
158 idom: vec![None; n],
159 children: vec![Vec::new(); n],
160 depth: vec![0; n],
161 }
162 }
163 #[allow(dead_code)]
164 pub fn set_idom(&mut self, node: usize, dom: usize) {
165 if node < self.idom.len() {
166 self.idom[node] = Some(dom);
167 if dom < self.children.len() {
168 self.children[dom].push(node);
169 }
170 self.depth[node] = if dom < self.depth.len() {
171 self.depth[dom] + 1
172 } else {
173 1
174 };
175 }
176 }
177 #[allow(dead_code)]
178 pub fn dominates(&self, a: usize, mut b: usize) -> bool {
179 if a == b {
180 return true;
181 }
182 let n = self.idom.len();
183 for _ in 0..n {
184 match self.idom.get(b).copied().flatten() {
185 None => return false,
186 Some(p) if p == a => return true,
187 Some(p) if p == b => return false,
188 Some(p) => b = p,
189 }
190 }
191 false
192 }
193 #[allow(dead_code)]
194 pub fn children_of(&self, n: usize) -> &[usize] {
195 self.children.get(n).map(|v| v.as_slice()).unwrap_or(&[])
196 }
197 #[allow(dead_code)]
198 pub fn depth_of(&self, n: usize) -> usize {
199 self.depth.get(n).copied().unwrap_or(0)
200 }
201 #[allow(dead_code)]
202 pub fn lca(&self, mut a: usize, mut b: usize) -> usize {
203 let n = self.idom.len();
204 for _ in 0..(2 * n) {
205 if a == b {
206 return a;
207 }
208 if self.depth_of(a) > self.depth_of(b) {
209 a = self.idom.get(a).and_then(|x| *x).unwrap_or(a);
210 } else {
211 b = self.idom.get(b).and_then(|x| *x).unwrap_or(b);
212 }
213 }
214 0
215 }
216}
/// Rewrites recognized by the strength-reduction pass (see `match_rule` for
/// detection and `apply_rule` for the emitted code).
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum StrengthReduceRule {
    /// `x * 2^n` -> `x << n`; payload is the shift amount `n`.
    MulByPow2(u32),
    /// `x / 2^n` -> logical shift right (`lshr`) by `n`.
    DivByPow2(u32),
    /// `x % 2^n` -> `x & (2^n - 1)`.
    ModByPow2(u32),
    /// `x * c` for non-power-of-two `c` -> shift/add-sub sequence.
    MulByConstant(u64),
    /// `x / c` for non-power-of-two `c` -> magic-number division primitive.
    DivByConstant(u64),
    /// `pow(x, 2)` -> `x * x`.
    Pow2Const,
    /// `pow(x, 3)` -> `x * x * x` via a squared temporary.
    Pow3Const,
    /// `0 - x` -> `neg x`.
    NegToSub,
    /// `x + 1` -> `incr x`.
    AddSubToInc,
}
/// Strength-reduction optimization pass over LCNF function bodies.
pub struct StrengthReductionPass {
    // Tuning knobs; `optimize_div`, `optimize_inc`, and `max_shift_count`
    // are the fields read by this pass.
    pub(super) config: StrengthConfig,
    // Counters for how many rewrites of each kind were applied.
    pub(super) report: StrengthReport,
    // Next id handed out by `fresh_var`; starts high (100_000) — presumably
    // to avoid colliding with existing variable ids; TODO confirm.
    pub(super) next_id: u64,
}
253impl StrengthReductionPass {
254 pub fn new(config: StrengthConfig) -> Self {
256 StrengthReductionPass {
257 config,
258 report: StrengthReport::default(),
259 next_id: 100_000,
260 }
261 }
262 pub(super) fn fresh_var(&mut self) -> LcnfVarId {
264 let id = self.next_id;
265 self.next_id += 1;
266 LcnfVarId(id)
267 }
268 pub fn run(&mut self, decls: &mut [LcnfFunDecl]) {
270 for decl in decls.iter_mut() {
271 let ivs = self.detect_induction_vars(decl);
272 let new_body = self.reduce_expr_with_ivs(&decl.body.clone(), &ivs);
273 decl.body = new_body;
274 }
275 }
    /// Reduces `expr` without any induction-variable context.
    pub fn reduce_expr(&mut self, expr: &LcnfExpr) -> LcnfExpr {
        self.reduce_expr_with_ivs(expr, &[])
    }
    /// Recursively rewrites `expr`, applying strength-reduction rules to each
    /// `Let` binding. `_ivs` is currently only threaded through the recursion;
    /// the per-binding rewriting happens in `reduce_let_value`.
    pub(super) fn reduce_expr_with_ivs(
        &mut self,
        expr: &LcnfExpr,
        _ivs: &[InductionVariable],
    ) -> LcnfExpr {
        match expr {
            LcnfExpr::Let {
                id,
                name,
                ty,
                value,
                body,
            } => {
                // Rewrite this binding; `prefix` holds extra lets the rewrite
                // needs (e.g. the squared temporary for pow(x, 3)).
                let (new_value, prefix) = self.reduce_let_value(value, *id, name, ty);
                let new_body = self.reduce_expr_with_ivs(body, _ivs);
                let inner = LcnfExpr::Let {
                    id: *id,
                    name: name.clone(),
                    ty: ty.clone(),
                    value: new_value,
                    body: Box::new(new_body),
                };
                // Wrap the rewritten let in the prefix bindings. Folding the
                // prefix in reverse keeps its original order outermost-first.
                prefix
                    .into_iter()
                    .rev()
                    .fold(inner, |acc, (pid, pname, pty, pval)| LcnfExpr::Let {
                        id: pid,
                        name: pname,
                        ty: pty,
                        value: pval,
                        body: Box::new(acc),
                    })
            }
            LcnfExpr::Case {
                scrutinee,
                scrutinee_ty,
                alts,
                default,
            } => {
                // Reduce every alternative body and the default arm.
                let new_alts = alts
                    .iter()
                    .map(|alt| LcnfAlt {
                        ctor_name: alt.ctor_name.clone(),
                        ctor_tag: alt.ctor_tag,
                        params: alt.params.clone(),
                        body: self.reduce_expr_with_ivs(&alt.body, _ivs),
                    })
                    .collect();
                let new_default = default
                    .as_ref()
                    .map(|d| Box::new(self.reduce_expr_with_ivs(d, _ivs)));
                LcnfExpr::Case {
                    scrutinee: *scrutinee,
                    scrutinee_ty: scrutinee_ty.clone(),
                    alts: new_alts,
                    default: new_default,
                }
            }
            // Leaves: nothing to rewrite.
            LcnfExpr::Return(_) | LcnfExpr::Unreachable | LcnfExpr::TailCall(_, _) => expr.clone(),
        }
    }
    /// Rewrites a single let-bound value.
    ///
    /// Returns the (possibly rewritten) value plus a list of prefix bindings
    /// `(id, name, ty, value)` that must be introduced before it.
    #[allow(clippy::too_many_arguments)]
    pub(super) fn reduce_let_value(
        &mut self,
        value: &LcnfLetValue,
        _id: LcnfVarId,
        _name: &str,
        ty: &LcnfType,
    ) -> (
        LcnfLetValue,
        Vec<(LcnfVarId, String, LcnfType, LcnfLetValue)>,
    ) {
        let prefix: Vec<(LcnfVarId, String, LcnfType, LcnfLetValue)> = vec![];
        match value {
            // Only applications are rewrite candidates; everything else
            // passes through unchanged.
            LcnfLetValue::App(func, args) => {
                if let Some(rule) = self.match_rule(func, args) {
                    return self.apply_rule(&rule, func, args, ty);
                }
                (value.clone(), prefix)
            }
            _ => (value.clone(), prefix),
        }
    }
    /// Matches an application against the strength-reduction rules.
    ///
    /// The callee is expected as a string literal naming a primitive
    /// ("mul", "div", "mod", "pow", "sub", "add"); anything else is skipped.
    pub(super) fn match_rule(
        &self,
        func: &LcnfArg,
        args: &[LcnfArg],
    ) -> Option<StrengthReduceRule> {
        let fname = match func {
            LcnfArg::Lit(LcnfLit::Str(s)) => s.as_str(),
            _ => return None,
        };
        match fname {
            "mul" if args.len() == 2 => {
                // Multiplication is commutative: accept a constant on either side.
                let c = const_arg(&args[1]).or_else(|| const_arg(&args[0]))?;
                // x*0 and x*1 are left alone (a constant-folding concern).
                if c == 0 {
                    return None;
                }
                if c == 1 {
                    return None;
                }
                if is_power_of_two(c) {
                    let n = log2(c);
                    Some(StrengthReduceRule::MulByPow2(n))
                } else {
                    Some(StrengthReduceRule::MulByConstant(c))
                }
            }
            "div" if args.len() == 2 => {
                // Division is not commutative: only a constant divisor counts.
                let c = const_arg(&args[1])?;
                if c == 0 {
                    return None;
                }
                if is_power_of_two(c) {
                    let n = log2(c);
                    Some(StrengthReduceRule::DivByPow2(n))
                } else if self.config.optimize_div {
                    // Magic-number division is opt-in via config.
                    Some(StrengthReduceRule::DivByConstant(c))
                } else {
                    None
                }
            }
            "mod" if args.len() == 2 => {
                let c = const_arg(&args[1])?;
                if c == 0 {
                    return None;
                }
                // Only power-of-two moduli reduce (to a bitmask).
                if is_power_of_two(c) {
                    let n = log2(c);
                    Some(StrengthReduceRule::ModByPow2(n))
                } else {
                    None
                }
            }
            // Only the small fixed exponents 2 and 3 are expanded.
            "pow" if args.len() == 2 => match const_arg(&args[1]) {
                Some(2) => Some(StrengthReduceRule::Pow2Const),
                Some(3) => Some(StrengthReduceRule::Pow3Const),
                _ => None,
            },
            "sub" if args.len() == 2 => {
                // `0 - x` becomes a negation.
                if let LcnfArg::Lit(LcnfLit::Nat(0)) = &args[0] {
                    Some(StrengthReduceRule::NegToSub)
                } else {
                    None
                }
            }
            "add" if args.len() == 2 && self.config.optimize_inc => {
                // `x + 1` (either operand order) becomes an increment.
                let c = const_arg(&args[1]).or_else(|| const_arg(&args[0]))?;
                if c == 1 {
                    Some(StrengthReduceRule::AddSubToInc)
                } else {
                    None
                }
            }
            _ => None,
        }
    }
    /// Applies a matched rule, producing the replacement value plus any
    /// prefix bindings required (same tuple layout as `reduce_let_value`).
    ///
    /// NOTE(review): `var_arg_of` presumably selects the non-constant operand
    /// of the application — confirm against `super::functions`.
    #[allow(clippy::too_many_arguments)]
    pub(super) fn apply_rule(
        &mut self,
        rule: &StrengthReduceRule,
        func: &LcnfArg,
        args: &[LcnfArg],
        ty: &LcnfType,
    ) -> (
        LcnfLetValue,
        Vec<(LcnfVarId, String, LcnfType, LcnfLetValue)>,
    ) {
        let empty: Vec<(LcnfVarId, String, LcnfType, LcnfLetValue)> = vec![];
        let var_arg = var_arg_of(func, args);
        match rule {
            // x * 2^n  ->  x << n
            StrengthReduceRule::MulByPow2(n) => {
                self.report.mul_reduced += 1;
                let shift_val = LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str("shl".into())),
                    vec![var_arg, LcnfArg::Lit(LcnfLit::Nat(*n as u64))],
                );
                (shift_val, empty)
            }
            // x / 2^n  ->  logical shift right by n
            StrengthReduceRule::DivByPow2(n) => {
                self.report.div_reduced += 1;
                let shift_val = LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str("lshr".into())),
                    vec![var_arg, LcnfArg::Lit(LcnfLit::Nat(*n as u64))],
                );
                (shift_val, empty)
            }
            // x % 2^n  ->  x & (2^n - 1)
            // NOTE(review): tallied under div_reduced — StrengthReport has no
            // dedicated mod counter.
            StrengthReduceRule::ModByPow2(n) => {
                self.report.div_reduced += 1;
                let mask = (1u64 << n) - 1;
                let and_val = LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str("band".into())),
                    vec![var_arg, LcnfArg::Lit(LcnfLit::Nat(mask))],
                );
                (and_val, empty)
            }
            // x * c  ->  shift/add-sub chain if c decomposes within the
            // configured budget; otherwise keep the original application.
            StrengthReduceRule::MulByConstant(c) => {
                let budget = self.config.max_shift_count;
                if let Some(ops) = decompose_mul(*c, budget) {
                    self.report.mul_reduced += 1;
                    self.build_shift_add_sequence(var_arg, &ops, ty)
                } else {
                    (LcnfLetValue::App(func.clone(), args.to_vec()), empty)
                }
            }
            // x / c  ->  magic-number division primitive
            StrengthReduceRule::DivByConstant(c) => {
                self.report.div_reduced += 1;
                let magic_val = LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str("magic_div".into())),
                    vec![var_arg, LcnfArg::Lit(LcnfLit::Nat(*c))],
                );
                (magic_val, empty)
            }
            // pow(x, 2)  ->  x * x
            StrengthReduceRule::Pow2Const => {
                self.report.pow_reduced += 1;
                (
                    LcnfLetValue::App(
                        LcnfArg::Lit(LcnfLit::Str("mul".into())),
                        vec![var_arg.clone(), var_arg],
                    ),
                    empty,
                )
            }
            // pow(x, 3)  ->  let sq = x * x in sq * x
            StrengthReduceRule::Pow3Const => {
                self.report.pow_reduced += 1;
                let sq_id = self.fresh_var();
                let sq_val = LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str("mul".into())),
                    vec![var_arg.clone(), var_arg.clone()],
                );
                let cube_val = LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str("mul".into())),
                    vec![LcnfArg::Var(sq_id), var_arg],
                );
                let prefix = vec![(sq_id, "sq".into(), ty.clone(), sq_val)];
                (cube_val, prefix)
            }
            // 0 - x  ->  neg x
            StrengthReduceRule::NegToSub => {
                self.report.neg_reduced += 1;
                (
                    LcnfLetValue::App(LcnfArg::Lit(LcnfLit::Str("neg".into())), vec![var_arg]),
                    empty,
                )
            }
            // x + 1  ->  incr x
            StrengthReduceRule::AddSubToInc => {
                self.report.inc_reduced += 1;
                (
                    LcnfLetValue::App(LcnfArg::Lit(LcnfLit::Str("incr".into())), vec![var_arg]),
                    empty,
                )
            }
        }
    }
    /// Emits the shift/add-sub chain for `x * c` from the decomposition `ops`
    /// (pairs of `(shift_amount, sign)` produced by `decompose_mul`).
    ///
    /// Returns the final value together with the prefix bindings that compute
    /// the intermediate shifts and partial sums.
    pub(super) fn build_shift_add_sequence(
        &mut self,
        x: LcnfArg,
        ops: &[(u32, i64)],
        ty: &LcnfType,
    ) -> (
        LcnfLetValue,
        Vec<(LcnfVarId, String, LcnfType, LcnfLetValue)>,
    ) {
        let mut prefix: Vec<(LcnfVarId, String, LcnfType, LcnfLetValue)> = vec![];
        if ops.is_empty() {
            // Empty decomposition: the product is zero.
            return (LcnfLetValue::Lit(LcnfLit::Nat(0)), prefix);
        }
        // NOTE(review): the first term's sign is discarded — this assumes
        // decompose_mul always emits a positive leading term; confirm.
        let (first_shift, _first_sign) = ops[0];
        let shifted0 = self.fresh_var();
        prefix.push((
            shifted0,
            "sr0".into(),
            ty.clone(),
            LcnfLetValue::App(
                LcnfArg::Lit(LcnfLit::Str("shl".into())),
                vec![x.clone(), LcnfArg::Lit(LcnfLit::Nat(first_shift as u64))],
            ),
        ));
        // Accumulate remaining terms: acc = acc +/- (x << shift).
        let mut acc_var = shifted0;
        for &(shift, sign) in &ops[1..] {
            let shifted_var = self.fresh_var();
            prefix.push((
                shifted_var,
                "sr_sh".into(),
                ty.clone(),
                LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str("shl".into())),
                    vec![x.clone(), LcnfArg::Lit(LcnfLit::Nat(shift as u64))],
                ),
            ));
            let combined = self.fresh_var();
            let op_name = if sign > 0 { "add" } else { "sub" };
            prefix.push((
                combined,
                "sr_acc".into(),
                ty.clone(),
                LcnfLetValue::App(
                    LcnfArg::Lit(LcnfLit::Str(op_name.into())),
                    vec![LcnfArg::Var(acc_var), LcnfArg::Var(shifted_var)],
                ),
            ));
            acc_var = combined;
        }
        // NOTE(review): the result uses FVar while operands above use Var —
        // confirm both constructors refer to the same binding namespace.
        (LcnfLetValue::FVar(acc_var), prefix)
    }
594 pub fn detect_induction_vars(&self, decl: &LcnfFunDecl) -> Vec<InductionVariable> {
600 let mut ivs = Vec::new();
601 let params: Vec<&LcnfParam> = decl.params.iter().collect();
602 self.find_tail_call_ivs(&decl.body, ¶ms, &mut ivs);
603 ivs
604 }
    /// Recursively scans `expr` for tail calls and registers each parameter
    /// position that feeds one as an induction variable (deduplicated by id)
    /// with initial value 0 and step 1.
    ///
    /// NOTE(review): the argument expression itself is ignored (`_arg`), so
    /// this over-approximates — every tail-called parameter slot counts.
    pub(super) fn find_tail_call_ivs(
        &self,
        expr: &LcnfExpr,
        params: &[&LcnfParam],
        ivs: &mut Vec<InductionVariable>,
    ) {
        match expr {
            LcnfExpr::TailCall(_, args) => {
                for (i, _arg) in args.iter().enumerate() {
                    if i < params.len() {
                        let p = params[i];
                        // Register each parameter at most once.
                        if ivs.iter().all(|iv| iv.var != p.id) {
                            ivs.push(InductionVariable::new(
                                p.id,
                                LcnfArg::Lit(LcnfLit::Nat(0)),
                                1,
                                p.name.clone(),
                            ));
                        }
                    }
                }
            }
            // Only let-bodies and case arms can contain further tail calls.
            LcnfExpr::Let { body, .. } => self.find_tail_call_ivs(body, params, ivs),
            LcnfExpr::Case { alts, default, .. } => {
                for alt in alts {
                    self.find_tail_call_ivs(&alt.body, params, ivs);
                }
                if let Some(d) = default {
                    self.find_tail_call_ivs(d, params, ivs);
                }
            }
            LcnfExpr::Return(_) | LcnfExpr::Unreachable => {}
        }
    }
    /// Rewrites the uses of induction variable `iv` that `collect_linear_uses`
    /// identifies as linear; returns `expr` unchanged when there are none.
    /// Each rewritten use is counted in `iv_reductions`.
    pub fn reduce_iv_uses(&mut self, expr: &LcnfExpr, iv: &InductionVariable) -> LcnfExpr {
        let linears = collect_linear_uses(expr, iv.var);
        if linears.is_empty() {
            return expr.clone();
        }
        self.report.iv_reductions += linears.len();
        rewrite_linear_uses(expr, &linears, iv)
    }
    /// Returns a snapshot (clone) of the accumulated rewrite counters.
    pub fn report(&self) -> StrengthReport {
        self.report.clone()
    }
656}
657#[allow(dead_code)]
659#[derive(Debug, Clone)]
660pub struct SRX2PassConfig {
661 pub name: String,
662 pub phase: SRX2PassPhase,
663 pub enabled: bool,
664 pub max_iterations: usize,
665 pub debug: u32,
666 pub timeout_ms: Option<u64>,
667}
668impl SRX2PassConfig {
669 #[allow(dead_code)]
670 pub fn new(name: impl Into<String>) -> Self {
671 Self {
672 name: name.into(),
673 phase: SRX2PassPhase::Middle,
674 enabled: true,
675 max_iterations: 100,
676 debug: 0,
677 timeout_ms: None,
678 }
679 }
680 #[allow(dead_code)]
681 pub fn with_phase(mut self, phase: SRX2PassPhase) -> Self {
682 self.phase = phase;
683 self
684 }
685 #[allow(dead_code)]
686 pub fn with_max_iter(mut self, n: usize) -> Self {
687 self.max_iterations = n;
688 self
689 }
690 #[allow(dead_code)]
691 pub fn with_debug(mut self, d: u32) -> Self {
692 self.debug = d;
693 self
694 }
695 #[allow(dead_code)]
696 pub fn disabled(mut self) -> Self {
697 self.enabled = false;
698 self
699 }
700 #[allow(dead_code)]
701 pub fn with_timeout(mut self, ms: u64) -> Self {
702 self.timeout_ms = Some(ms);
703 self
704 }
705 #[allow(dead_code)]
706 pub fn is_debug_enabled(&self) -> bool {
707 self.debug > 0
708 }
709}
710#[allow(dead_code)]
712#[derive(Debug, Clone, Default)]
713pub struct SRExtConstFolder {
714 pub(super) folds: usize,
715 pub(super) failures: usize,
716 pub(super) enabled: bool,
717}
718impl SRExtConstFolder {
719 #[allow(dead_code)]
720 pub fn new() -> Self {
721 Self {
722 folds: 0,
723 failures: 0,
724 enabled: true,
725 }
726 }
727 #[allow(dead_code)]
728 pub fn add_i64(&mut self, a: i64, b: i64) -> Option<i64> {
729 self.folds += 1;
730 a.checked_add(b)
731 }
732 #[allow(dead_code)]
733 pub fn sub_i64(&mut self, a: i64, b: i64) -> Option<i64> {
734 self.folds += 1;
735 a.checked_sub(b)
736 }
737 #[allow(dead_code)]
738 pub fn mul_i64(&mut self, a: i64, b: i64) -> Option<i64> {
739 self.folds += 1;
740 a.checked_mul(b)
741 }
742 #[allow(dead_code)]
743 pub fn div_i64(&mut self, a: i64, b: i64) -> Option<i64> {
744 if b == 0 {
745 self.failures += 1;
746 None
747 } else {
748 self.folds += 1;
749 a.checked_div(b)
750 }
751 }
752 #[allow(dead_code)]
753 pub fn rem_i64(&mut self, a: i64, b: i64) -> Option<i64> {
754 if b == 0 {
755 self.failures += 1;
756 None
757 } else {
758 self.folds += 1;
759 a.checked_rem(b)
760 }
761 }
762 #[allow(dead_code)]
763 pub fn neg_i64(&mut self, a: i64) -> Option<i64> {
764 self.folds += 1;
765 a.checked_neg()
766 }
767 #[allow(dead_code)]
768 pub fn shl_i64(&mut self, a: i64, s: u32) -> Option<i64> {
769 if s >= 64 {
770 self.failures += 1;
771 None
772 } else {
773 self.folds += 1;
774 a.checked_shl(s)
775 }
776 }
777 #[allow(dead_code)]
778 pub fn shr_i64(&mut self, a: i64, s: u32) -> Option<i64> {
779 if s >= 64 {
780 self.failures += 1;
781 None
782 } else {
783 self.folds += 1;
784 a.checked_shr(s)
785 }
786 }
787 #[allow(dead_code)]
788 pub fn and_i64(&mut self, a: i64, b: i64) -> i64 {
789 self.folds += 1;
790 a & b
791 }
792 #[allow(dead_code)]
793 pub fn or_i64(&mut self, a: i64, b: i64) -> i64 {
794 self.folds += 1;
795 a | b
796 }
797 #[allow(dead_code)]
798 pub fn xor_i64(&mut self, a: i64, b: i64) -> i64 {
799 self.folds += 1;
800 a ^ b
801 }
802 #[allow(dead_code)]
803 pub fn not_i64(&mut self, a: i64) -> i64 {
804 self.folds += 1;
805 !a
806 }
807 #[allow(dead_code)]
808 pub fn fold_count(&self) -> usize {
809 self.folds
810 }
811 #[allow(dead_code)]
812 pub fn failure_count(&self) -> usize {
813 self.failures
814 }
815 #[allow(dead_code)]
816 pub fn enable(&mut self) {
817 self.enabled = true;
818 }
819 #[allow(dead_code)]
820 pub fn disable(&mut self) {
821 self.enabled = false;
822 }
823 #[allow(dead_code)]
824 pub fn is_enabled(&self) -> bool {
825 self.enabled
826 }
827}
#[allow(dead_code)]
/// Aggregate statistics for one pass execution.
#[derive(Debug, Clone, Default)]
pub struct SRExtPassStats {
    pub iterations: usize,
    pub changed: bool,
    pub nodes_visited: usize,
    pub nodes_modified: usize,
    pub time_ms: u64,
    pub memory_bytes: usize,
    pub errors: usize,
}
impl SRExtPassStats {
    /// All-zero stats.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Counts one visited node.
    #[allow(dead_code)]
    pub fn visit(&mut self) {
        self.nodes_visited += 1;
    }
    /// Counts one modified node and flags the pass as having changed the IR.
    #[allow(dead_code)]
    pub fn modify(&mut self) {
        self.changed = true;
        self.nodes_modified += 1;
    }
    /// Counts one pass iteration.
    #[allow(dead_code)]
    pub fn iterate(&mut self) {
        self.iterations += 1;
    }
    /// Counts one error.
    #[allow(dead_code)]
    pub fn error(&mut self) {
        self.errors += 1;
    }
    /// Modified/visited ratio; 0.0 when nothing was visited.
    #[allow(dead_code)]
    pub fn efficiency(&self) -> f64 {
        match self.nodes_visited {
            0 => 0.0,
            visited => self.nodes_modified as f64 / visited as f64,
        }
    }
    /// Accumulates `o` into `self`: counters add, `changed` ORs, and
    /// `memory_bytes` takes the maximum of the two.
    #[allow(dead_code)]
    pub fn merge(&mut self, o: &SRExtPassStats) {
        self.iterations += o.iterations;
        self.changed = self.changed || o.changed;
        self.nodes_visited += o.nodes_visited;
        self.nodes_modified += o.nodes_modified;
        self.time_ms += o.time_ms;
        if o.memory_bytes > self.memory_bytes {
            self.memory_bytes = o.memory_bytes;
        }
        self.errors += o.errors;
    }
}
#[allow(dead_code)]
/// Per-basic-block liveness sets (hash-set based, `u32` variable ids).
#[derive(Debug, Clone)]
pub struct OSLivenessInfo {
    pub live_in: Vec<std::collections::HashSet<u32>>,
    pub live_out: Vec<std::collections::HashSet<u32>>,
    pub defs: Vec<std::collections::HashSet<u32>>,
    pub uses: Vec<std::collections::HashSet<u32>>,
}
impl OSLivenessInfo {
    /// Allocates empty sets for `block_count` blocks.
    #[allow(dead_code)]
    pub fn new(block_count: usize) -> Self {
        let fresh = || vec![std::collections::HashSet::new(); block_count];
        OSLivenessInfo {
            live_in: fresh(),
            live_out: fresh(),
            defs: fresh(),
            uses: fresh(),
        }
    }
    /// Records a definition of `var` in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_def(&mut self, block: usize, var: u32) {
        if let Some(set) = self.defs.get_mut(block) {
            set.insert(var);
        }
    }
    /// Records a use of `var` in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_use(&mut self, block: usize, var: u32) {
        if let Some(set) = self.uses.get_mut(block) {
            set.insert(var);
        }
    }
    /// True when `var` is live on entry to `block`.
    #[allow(dead_code)]
    pub fn is_live_in(&self, block: usize, var: u32) -> bool {
        self.live_in.get(block).map_or(false, |s| s.contains(&var))
    }
    /// True when `var` is live on exit from `block`.
    #[allow(dead_code)]
    pub fn is_live_out(&self, block: usize, var: u32) -> bool {
        self.live_out.get(block).map_or(false, |s| s.contains(&var))
    }
}
/// Counters for rewrites applied by `StrengthReductionPass`.
#[derive(Clone, Default, Debug)]
pub struct StrengthReport {
    /// Multiplications turned into shifts or shift/add-sub sequences.
    pub mul_reduced: usize,
    /// Divisions and power-of-two modulos rewritten (shift, mask, or magic division).
    pub div_reduced: usize,
    /// `pow` calls expanded into multiplications.
    pub pow_reduced: usize,
    /// Linear induction-variable uses rewritten by `reduce_iv_uses`.
    pub iv_reductions: usize,
    /// `x + 1` applications rewritten to `incr`.
    pub inc_reduced: usize,
    /// `0 - x` applications rewritten to `neg`.
    pub neg_reduced: usize,
}
#[allow(dead_code)]
/// Dominator tree keyed by block index with `u32` idom links.
#[derive(Debug, Clone)]
pub struct OSDominatorTree {
    /// `idom[b]` = immediate dominator of block `b`, if set.
    pub idom: Vec<Option<u32>>,
    /// Child lists of the dominator tree (not maintained by `set_idom`).
    pub dom_children: Vec<Vec<u32>>,
    /// Depth of each node (not maintained by `set_idom`).
    pub dom_depth: Vec<u32>,
}
impl OSDominatorTree {
    /// Tree over `size` nodes with no links set.
    #[allow(dead_code)]
    pub fn new(size: usize) -> Self {
        OSDominatorTree {
            idom: vec![None; size],
            dom_children: vec![Vec::new(); size],
            dom_depth: vec![0; size],
        }
    }
    /// Records `idom` as the immediate dominator of `node`.
    /// Out-of-range nodes are ignored (the previous version panicked),
    /// matching the behavior of `SRExtDomTree::set_idom`.
    #[allow(dead_code)]
    pub fn set_idom(&mut self, node: usize, idom: u32) {
        if let Some(slot) = self.idom.get_mut(node) {
            *slot = Some(idom);
        }
    }
    /// True if `a` dominates `b` (every node dominates itself).
    ///
    /// The walk is bounded by the node count and uses checked indexing, so
    /// malformed idom data (cycles or out-of-range parents) terminates
    /// instead of looping forever or panicking.
    #[allow(dead_code)]
    pub fn dominates(&self, a: usize, b: usize) -> bool {
        if a == b {
            return true;
        }
        let mut cur = b;
        for _ in 0..self.idom.len() {
            match self.idom.get(cur).copied().flatten() {
                Some(parent) if parent as usize == a => return true,
                Some(parent) if parent as usize == cur => return false,
                Some(parent) => cur = parent as usize,
                None => return false,
            }
        }
        false
    }
    /// Depth of `node` (0 for out-of-range nodes).
    #[allow(dead_code)]
    pub fn depth(&self, node: usize) -> u32 {
        self.dom_depth.get(node).copied().unwrap_or(0)
    }
}
982#[allow(dead_code)]
984#[derive(Debug, Clone)]
985pub struct SRExtDepGraph {
986 pub(super) n: usize,
987 pub(super) adj: Vec<Vec<usize>>,
988 pub(super) rev: Vec<Vec<usize>>,
989 pub(super) edge_count: usize,
990}
991impl SRExtDepGraph {
992 #[allow(dead_code)]
993 pub fn new(n: usize) -> Self {
994 Self {
995 n,
996 adj: vec![Vec::new(); n],
997 rev: vec![Vec::new(); n],
998 edge_count: 0,
999 }
1000 }
1001 #[allow(dead_code)]
1002 pub fn add_edge(&mut self, from: usize, to: usize) {
1003 if from < self.n && to < self.n {
1004 if !self.adj[from].contains(&to) {
1005 self.adj[from].push(to);
1006 self.rev[to].push(from);
1007 self.edge_count += 1;
1008 }
1009 }
1010 }
1011 #[allow(dead_code)]
1012 pub fn succs(&self, n: usize) -> &[usize] {
1013 self.adj.get(n).map(|v| v.as_slice()).unwrap_or(&[])
1014 }
1015 #[allow(dead_code)]
1016 pub fn preds(&self, n: usize) -> &[usize] {
1017 self.rev.get(n).map(|v| v.as_slice()).unwrap_or(&[])
1018 }
1019 #[allow(dead_code)]
1020 pub fn topo_sort(&self) -> Option<Vec<usize>> {
1021 let mut deg: Vec<usize> = (0..self.n).map(|i| self.rev[i].len()).collect();
1022 let mut q: std::collections::VecDeque<usize> =
1023 (0..self.n).filter(|&i| deg[i] == 0).collect();
1024 let mut out = Vec::with_capacity(self.n);
1025 while let Some(u) = q.pop_front() {
1026 out.push(u);
1027 for &v in &self.adj[u] {
1028 deg[v] -= 1;
1029 if deg[v] == 0 {
1030 q.push_back(v);
1031 }
1032 }
1033 }
1034 if out.len() == self.n {
1035 Some(out)
1036 } else {
1037 None
1038 }
1039 }
1040 #[allow(dead_code)]
1041 pub fn has_cycle(&self) -> bool {
1042 self.topo_sort().is_none()
1043 }
1044 #[allow(dead_code)]
1045 pub fn reachable(&self, start: usize) -> Vec<usize> {
1046 let mut vis = vec![false; self.n];
1047 let mut stk = vec![start];
1048 let mut out = Vec::new();
1049 while let Some(u) = stk.pop() {
1050 if u < self.n && !vis[u] {
1051 vis[u] = true;
1052 out.push(u);
1053 for &v in &self.adj[u] {
1054 if !vis[v] {
1055 stk.push(v);
1056 }
1057 }
1058 }
1059 }
1060 out
1061 }
1062 #[allow(dead_code)]
1063 pub fn scc(&self) -> Vec<Vec<usize>> {
1064 let mut visited = vec![false; self.n];
1065 let mut order = Vec::new();
1066 for i in 0..self.n {
1067 if !visited[i] {
1068 let mut stk = vec![(i, 0usize)];
1069 while let Some((u, idx)) = stk.last_mut() {
1070 if !visited[*u] {
1071 visited[*u] = true;
1072 }
1073 if *idx < self.adj[*u].len() {
1074 let v = self.adj[*u][*idx];
1075 *idx += 1;
1076 if !visited[v] {
1077 stk.push((v, 0));
1078 }
1079 } else {
1080 order.push(*u);
1081 stk.pop();
1082 }
1083 }
1084 }
1085 }
1086 let mut comp = vec![usize::MAX; self.n];
1087 let mut components: Vec<Vec<usize>> = Vec::new();
1088 for &start in order.iter().rev() {
1089 if comp[start] == usize::MAX {
1090 let cid = components.len();
1091 let mut component = Vec::new();
1092 let mut stk = vec![start];
1093 while let Some(u) = stk.pop() {
1094 if comp[u] == usize::MAX {
1095 comp[u] = cid;
1096 component.push(u);
1097 for &v in &self.rev[u] {
1098 if comp[v] == usize::MAX {
1099 stk.push(v);
1100 }
1101 }
1102 }
1103 }
1104 components.push(component);
1105 }
1106 }
1107 components
1108 }
1109 #[allow(dead_code)]
1110 pub fn node_count(&self) -> usize {
1111 self.n
1112 }
1113 #[allow(dead_code)]
1114 pub fn edge_count(&self) -> usize {
1115 self.edge_count
1116 }
1117}
1118#[allow(dead_code)]
1119#[derive(Debug, Clone)]
1120pub struct OSAnalysisCache {
1121 pub(super) entries: std::collections::HashMap<String, OSCacheEntry>,
1122 pub(super) max_size: usize,
1123 pub(super) hits: u64,
1124 pub(super) misses: u64,
1125}
1126impl OSAnalysisCache {
1127 #[allow(dead_code)]
1128 pub fn new(max_size: usize) -> Self {
1129 OSAnalysisCache {
1130 entries: std::collections::HashMap::new(),
1131 max_size,
1132 hits: 0,
1133 misses: 0,
1134 }
1135 }
1136 #[allow(dead_code)]
1137 pub fn get(&mut self, key: &str) -> Option<&OSCacheEntry> {
1138 if self.entries.contains_key(key) {
1139 self.hits += 1;
1140 self.entries.get(key)
1141 } else {
1142 self.misses += 1;
1143 None
1144 }
1145 }
1146 #[allow(dead_code)]
1147 pub fn insert(&mut self, key: String, data: Vec<u8>) {
1148 if self.entries.len() >= self.max_size {
1149 if let Some(oldest) = self.entries.keys().next().cloned() {
1150 self.entries.remove(&oldest);
1151 }
1152 }
1153 self.entries.insert(
1154 key.clone(),
1155 OSCacheEntry {
1156 key,
1157 data,
1158 timestamp: 0,
1159 valid: true,
1160 },
1161 );
1162 }
1163 #[allow(dead_code)]
1164 pub fn invalidate(&mut self, key: &str) {
1165 if let Some(entry) = self.entries.get_mut(key) {
1166 entry.valid = false;
1167 }
1168 }
1169 #[allow(dead_code)]
1170 pub fn clear(&mut self) {
1171 self.entries.clear();
1172 }
1173 #[allow(dead_code)]
1174 pub fn hit_rate(&self) -> f64 {
1175 let total = self.hits + self.misses;
1176 if total == 0 {
1177 return 0.0;
1178 }
1179 self.hits as f64 / total as f64
1180 }
1181 #[allow(dead_code)]
1182 pub fn size(&self) -> usize {
1183 self.entries.len()
1184 }
1185}
1186#[allow(dead_code)]
1188#[derive(Debug, Default)]
1189pub struct SRExtPassRegistry {
1190 pub(super) configs: Vec<SRExtPassConfig>,
1191 pub(super) stats: Vec<SRExtPassStats>,
1192}
1193impl SRExtPassRegistry {
1194 #[allow(dead_code)]
1195 pub fn new() -> Self {
1196 Self::default()
1197 }
1198 #[allow(dead_code)]
1199 pub fn register(&mut self, c: SRExtPassConfig) {
1200 self.stats.push(SRExtPassStats::new());
1201 self.configs.push(c);
1202 }
1203 #[allow(dead_code)]
1204 pub fn len(&self) -> usize {
1205 self.configs.len()
1206 }
1207 #[allow(dead_code)]
1208 pub fn is_empty(&self) -> bool {
1209 self.configs.is_empty()
1210 }
1211 #[allow(dead_code)]
1212 pub fn get(&self, i: usize) -> Option<&SRExtPassConfig> {
1213 self.configs.get(i)
1214 }
1215 #[allow(dead_code)]
1216 pub fn get_stats(&self, i: usize) -> Option<&SRExtPassStats> {
1217 self.stats.get(i)
1218 }
1219 #[allow(dead_code)]
1220 pub fn enabled_passes(&self) -> Vec<&SRExtPassConfig> {
1221 self.configs.iter().filter(|c| c.enabled).collect()
1222 }
1223 #[allow(dead_code)]
1224 pub fn passes_in_phase(&self, ph: &SRExtPassPhase) -> Vec<&SRExtPassConfig> {
1225 self.configs
1226 .iter()
1227 .filter(|c| c.enabled && &c.phase == ph)
1228 .collect()
1229 }
1230 #[allow(dead_code)]
1231 pub fn total_nodes_visited(&self) -> usize {
1232 self.stats.iter().map(|s| s.nodes_visited).sum()
1233 }
1234 #[allow(dead_code)]
1235 pub fn any_changed(&self) -> bool {
1236 self.stats.iter().any(|s| s.changed)
1237 }
1238}
1239#[allow(dead_code)]
1240#[derive(Debug, Clone)]
1241pub struct OSDepGraph {
1242 pub(super) nodes: Vec<u32>,
1243 pub(super) edges: Vec<(u32, u32)>,
1244}
1245impl OSDepGraph {
1246 #[allow(dead_code)]
1247 pub fn new() -> Self {
1248 OSDepGraph {
1249 nodes: Vec::new(),
1250 edges: Vec::new(),
1251 }
1252 }
1253 #[allow(dead_code)]
1254 pub fn add_node(&mut self, id: u32) {
1255 if !self.nodes.contains(&id) {
1256 self.nodes.push(id);
1257 }
1258 }
1259 #[allow(dead_code)]
1260 pub fn add_dep(&mut self, dep: u32, dependent: u32) {
1261 self.add_node(dep);
1262 self.add_node(dependent);
1263 self.edges.push((dep, dependent));
1264 }
1265 #[allow(dead_code)]
1266 pub fn dependents_of(&self, node: u32) -> Vec<u32> {
1267 self.edges
1268 .iter()
1269 .filter(|(d, _)| *d == node)
1270 .map(|(_, dep)| *dep)
1271 .collect()
1272 }
1273 #[allow(dead_code)]
1274 pub fn dependencies_of(&self, node: u32) -> Vec<u32> {
1275 self.edges
1276 .iter()
1277 .filter(|(_, dep)| *dep == node)
1278 .map(|(d, _)| *d)
1279 .collect()
1280 }
1281 #[allow(dead_code)]
1282 pub fn topological_sort(&self) -> Vec<u32> {
1283 let mut in_degree: std::collections::HashMap<u32, u32> = std::collections::HashMap::new();
1284 for &n in &self.nodes {
1285 in_degree.insert(n, 0);
1286 }
1287 for (_, dep) in &self.edges {
1288 *in_degree.entry(*dep).or_insert(0) += 1;
1289 }
1290 let mut queue: std::collections::VecDeque<u32> = self
1291 .nodes
1292 .iter()
1293 .filter(|&&n| in_degree[&n] == 0)
1294 .copied()
1295 .collect();
1296 let mut result = Vec::new();
1297 while let Some(node) = queue.pop_front() {
1298 result.push(node);
1299 for dep in self.dependents_of(node) {
1300 let cnt = in_degree.entry(dep).or_insert(0);
1301 *cnt = cnt.saturating_sub(1);
1302 if *cnt == 0 {
1303 queue.push_back(dep);
1304 }
1305 }
1306 }
1307 result
1308 }
1309 #[allow(dead_code)]
1310 pub fn has_cycle(&self) -> bool {
1311 self.topological_sort().len() < self.nodes.len()
1312 }
1313}
#[allow(dead_code)]
/// Adjacency-list dependency graph with forward and reverse edge lists.
#[derive(Debug, Clone)]
pub struct SRX2DepGraph {
    // Number of nodes; valid ids are 0..n.
    pub(super) n: usize,
    // Forward adjacency: adj[u] lists the successors of u.
    pub(super) adj: Vec<Vec<usize>>,
    // Reverse adjacency: rev[v] lists the predecessors of v.
    pub(super) rev: Vec<Vec<usize>>,
    // Count of distinct edges inserted.
    pub(super) edge_count: usize,
}
1323impl SRX2DepGraph {
1324 #[allow(dead_code)]
1325 pub fn new(n: usize) -> Self {
1326 Self {
1327 n,
1328 adj: vec![Vec::new(); n],
1329 rev: vec![Vec::new(); n],
1330 edge_count: 0,
1331 }
1332 }
1333 #[allow(dead_code)]
1334 pub fn add_edge(&mut self, from: usize, to: usize) {
1335 if from < self.n && to < self.n {
1336 if !self.adj[from].contains(&to) {
1337 self.adj[from].push(to);
1338 self.rev[to].push(from);
1339 self.edge_count += 1;
1340 }
1341 }
1342 }
1343 #[allow(dead_code)]
1344 pub fn succs(&self, n: usize) -> &[usize] {
1345 self.adj.get(n).map(|v| v.as_slice()).unwrap_or(&[])
1346 }
1347 #[allow(dead_code)]
1348 pub fn preds(&self, n: usize) -> &[usize] {
1349 self.rev.get(n).map(|v| v.as_slice()).unwrap_or(&[])
1350 }
1351 #[allow(dead_code)]
1352 pub fn topo_sort(&self) -> Option<Vec<usize>> {
1353 let mut deg: Vec<usize> = (0..self.n).map(|i| self.rev[i].len()).collect();
1354 let mut q: std::collections::VecDeque<usize> =
1355 (0..self.n).filter(|&i| deg[i] == 0).collect();
1356 let mut out = Vec::with_capacity(self.n);
1357 while let Some(u) = q.pop_front() {
1358 out.push(u);
1359 for &v in &self.adj[u] {
1360 deg[v] -= 1;
1361 if deg[v] == 0 {
1362 q.push_back(v);
1363 }
1364 }
1365 }
1366 if out.len() == self.n {
1367 Some(out)
1368 } else {
1369 None
1370 }
1371 }
1372 #[allow(dead_code)]
1373 pub fn has_cycle(&self) -> bool {
1374 self.topo_sort().is_none()
1375 }
1376 #[allow(dead_code)]
1377 pub fn reachable(&self, start: usize) -> Vec<usize> {
1378 let mut vis = vec![false; self.n];
1379 let mut stk = vec![start];
1380 let mut out = Vec::new();
1381 while let Some(u) = stk.pop() {
1382 if u < self.n && !vis[u] {
1383 vis[u] = true;
1384 out.push(u);
1385 for &v in &self.adj[u] {
1386 if !vis[v] {
1387 stk.push(v);
1388 }
1389 }
1390 }
1391 }
1392 out
1393 }
1394 #[allow(dead_code)]
1395 pub fn scc(&self) -> Vec<Vec<usize>> {
1396 let mut visited = vec![false; self.n];
1397 let mut order = Vec::new();
1398 for i in 0..self.n {
1399 if !visited[i] {
1400 let mut stk = vec![(i, 0usize)];
1401 while let Some((u, idx)) = stk.last_mut() {
1402 if !visited[*u] {
1403 visited[*u] = true;
1404 }
1405 if *idx < self.adj[*u].len() {
1406 let v = self.adj[*u][*idx];
1407 *idx += 1;
1408 if !visited[v] {
1409 stk.push((v, 0));
1410 }
1411 } else {
1412 order.push(*u);
1413 stk.pop();
1414 }
1415 }
1416 }
1417 }
1418 let mut comp = vec![usize::MAX; self.n];
1419 let mut components: Vec<Vec<usize>> = Vec::new();
1420 for &start in order.iter().rev() {
1421 if comp[start] == usize::MAX {
1422 let cid = components.len();
1423 let mut component = Vec::new();
1424 let mut stk = vec![start];
1425 while let Some(u) = stk.pop() {
1426 if comp[u] == usize::MAX {
1427 comp[u] = cid;
1428 component.push(u);
1429 for &v in &self.rev[u] {
1430 if comp[v] == usize::MAX {
1431 stk.push(v);
1432 }
1433 }
1434 }
1435 }
1436 components.push(component);
1437 }
1438 }
1439 components
1440 }
1441 #[allow(dead_code)]
1442 pub fn node_count(&self) -> usize {
1443 self.n
1444 }
1445 #[allow(dead_code)]
1446 pub fn edge_count(&self) -> usize {
1447 self.edge_count
1448 }
1449}
#[derive(Clone, Debug)]
/// Tuning knobs for the strength-reduction pass.
pub struct StrengthConfig {
    /// Upper bound on shift amounts considered when rewriting operations as
    /// shifts — presumably; exact use is not visible in this chunk, confirm
    /// at call sites.
    pub max_shift_count: u32,
    /// Enables division-related rewrites — NOTE(review): semantics inferred
    /// from the name; verify against the pass implementation.
    pub optimize_div: bool,
    /// Enables increment-related rewrites — NOTE(review): semantics inferred
    /// from the name; verify against the pass implementation.
    pub optimize_inc: bool,
}
1463#[allow(dead_code)]
1465#[derive(Debug, Clone)]
1466pub struct SRExtPassConfig {
1467 pub name: String,
1468 pub phase: SRExtPassPhase,
1469 pub enabled: bool,
1470 pub max_iterations: usize,
1471 pub debug: u32,
1472 pub timeout_ms: Option<u64>,
1473}
1474impl SRExtPassConfig {
1475 #[allow(dead_code)]
1476 pub fn new(name: impl Into<String>) -> Self {
1477 Self {
1478 name: name.into(),
1479 phase: SRExtPassPhase::Middle,
1480 enabled: true,
1481 max_iterations: 100,
1482 debug: 0,
1483 timeout_ms: None,
1484 }
1485 }
1486 #[allow(dead_code)]
1487 pub fn with_phase(mut self, phase: SRExtPassPhase) -> Self {
1488 self.phase = phase;
1489 self
1490 }
1491 #[allow(dead_code)]
1492 pub fn with_max_iter(mut self, n: usize) -> Self {
1493 self.max_iterations = n;
1494 self
1495 }
1496 #[allow(dead_code)]
1497 pub fn with_debug(mut self, d: u32) -> Self {
1498 self.debug = d;
1499 self
1500 }
1501 #[allow(dead_code)]
1502 pub fn disabled(mut self) -> Self {
1503 self.enabled = false;
1504 self
1505 }
1506 #[allow(dead_code)]
1507 pub fn with_timeout(mut self, ms: u64) -> Self {
1508 self.timeout_ms = Some(ms);
1509 self
1510 }
1511 #[allow(dead_code)]
1512 pub fn is_debug_enabled(&self) -> bool {
1513 self.debug > 0
1514 }
1515}
1516#[allow(dead_code)]
1518#[derive(Debug)]
1519pub struct SRX2Cache {
1520 pub(super) entries: Vec<(u64, Vec<u8>, bool, u32)>,
1521 pub(super) cap: usize,
1522 pub(super) total_hits: u64,
1523 pub(super) total_misses: u64,
1524}
1525impl SRX2Cache {
1526 #[allow(dead_code)]
1527 pub fn new(cap: usize) -> Self {
1528 Self {
1529 entries: Vec::new(),
1530 cap,
1531 total_hits: 0,
1532 total_misses: 0,
1533 }
1534 }
1535 #[allow(dead_code)]
1536 pub fn get(&mut self, key: u64) -> Option<&[u8]> {
1537 for e in self.entries.iter_mut() {
1538 if e.0 == key && e.2 {
1539 e.3 += 1;
1540 self.total_hits += 1;
1541 return Some(&e.1);
1542 }
1543 }
1544 self.total_misses += 1;
1545 None
1546 }
1547 #[allow(dead_code)]
1548 pub fn put(&mut self, key: u64, data: Vec<u8>) {
1549 if self.entries.len() >= self.cap {
1550 self.entries.retain(|e| e.2);
1551 if self.entries.len() >= self.cap {
1552 self.entries.remove(0);
1553 }
1554 }
1555 self.entries.push((key, data, true, 0));
1556 }
1557 #[allow(dead_code)]
1558 pub fn invalidate(&mut self) {
1559 for e in self.entries.iter_mut() {
1560 e.2 = false;
1561 }
1562 }
1563 #[allow(dead_code)]
1564 pub fn hit_rate(&self) -> f64 {
1565 let t = self.total_hits + self.total_misses;
1566 if t == 0 {
1567 0.0
1568 } else {
1569 self.total_hits as f64 / t as f64
1570 }
1571 }
1572 #[allow(dead_code)]
1573 pub fn live_count(&self) -> usize {
1574 self.entries.iter().filter(|e| e.2).count()
1575 }
1576}
#[allow(dead_code)]
/// Per-block liveness data: def/use sets plus the live-in/live-out results,
/// each stored as a small unsorted vector of variable ids per block.
#[derive(Debug, Clone, Default)]
pub struct SRX2Liveness {
    pub live_in: Vec<Vec<usize>>,
    pub live_out: Vec<Vec<usize>>,
    pub defs: Vec<Vec<usize>>,
    pub uses: Vec<Vec<usize>>,
}
impl SRX2Liveness {
    /// Membership test shared by all the query methods; out-of-range block
    /// indices report `false`.
    fn set_contains(sets: &[Vec<usize>], b: usize, v: usize) -> bool {
        sets.get(b).map_or(false, |s| s.contains(&v))
    }
    /// Set insertion shared by `add_def`/`add_use`: dedups, and silently
    /// ignores out-of-range block indices.
    fn insert_unique(sets: &mut [Vec<usize>], b: usize, v: usize) {
        if let Some(set) = sets.get_mut(b) {
            if !set.contains(&v) {
                set.push(v);
            }
        }
    }
    #[allow(dead_code)]
    /// Allocates empty sets for `n` blocks.
    pub fn new(n: usize) -> Self {
        Self {
            live_in: vec![Vec::new(); n],
            live_out: vec![Vec::new(); n],
            defs: vec![Vec::new(); n],
            uses: vec![Vec::new(); n],
        }
    }
    #[allow(dead_code)]
    /// Is `v` live on entry to block `b`?
    pub fn live_in(&self, b: usize, v: usize) -> bool {
        Self::set_contains(&self.live_in, b, v)
    }
    #[allow(dead_code)]
    /// Is `v` live on exit from block `b`?
    pub fn live_out(&self, b: usize, v: usize) -> bool {
        Self::set_contains(&self.live_out, b, v)
    }
    #[allow(dead_code)]
    /// Marks `v` as defined in block `b`.
    pub fn add_def(&mut self, b: usize, v: usize) {
        Self::insert_unique(&mut self.defs, b, v);
    }
    #[allow(dead_code)]
    /// Marks `v` as used in block `b`.
    pub fn add_use(&mut self, b: usize, v: usize) {
        Self::insert_unique(&mut self.uses, b, v);
    }
    #[allow(dead_code)]
    /// Does block `b` use `v`?
    pub fn var_is_used_in_block(&self, b: usize, v: usize) -> bool {
        Self::set_contains(&self.uses, b, v)
    }
    #[allow(dead_code)]
    /// Does block `b` define `v`?
    pub fn var_is_def_in_block(&self, b: usize, v: usize) -> bool {
        Self::set_contains(&self.defs, b, v)
    }
}
#[allow(dead_code)]
/// Dominator tree: immediate dominator, children, and tree depth per node.
#[derive(Debug, Clone)]
pub struct SRX2DomTree {
    pub(super) idom: Vec<Option<usize>>,
    pub(super) children: Vec<Vec<usize>>,
    pub(super) depth: Vec<usize>,
}
impl SRX2DomTree {
    #[allow(dead_code)]
    /// Tree with `n` nodes, all initially roots (no idom) at depth 0.
    pub fn new(n: usize) -> Self {
        Self {
            idom: vec![None; n],
            children: vec![Vec::new(); n],
            depth: vec![0; n],
        }
    }
    #[allow(dead_code)]
    /// Records `dom` as the immediate dominator of `node`, updating the
    /// child list and depth. Repeated calls for the same node push duplicate
    /// child entries — callers are expected to set each idom once.
    pub fn set_idom(&mut self, node: usize, dom: usize) {
        if node < self.idom.len() {
            self.idom[node] = Some(dom);
            if dom < self.children.len() {
                self.children[dom].push(node);
            }
            // Out-of-range dominators still get a depth of 1 below a
            // nominal root.
            self.depth[node] = if dom < self.depth.len() {
                self.depth[dom] + 1
            } else {
                1
            };
        }
    }
    #[allow(dead_code)]
    /// Walks the idom chain upward from `b` looking for `a`. The loop is
    /// bounded by `n` steps (a valid chain cannot be longer), and a
    /// self-referential idom entry terminates with `false`.
    pub fn dominates(&self, a: usize, mut b: usize) -> bool {
        if a == b {
            return true;
        }
        let n = self.idom.len();
        for _ in 0..n {
            match self.idom.get(b).copied().flatten() {
                None => return false,
                Some(p) if p == a => return true,
                Some(p) if p == b => return false,
                Some(p) => b = p,
            }
        }
        false
    }
    #[allow(dead_code)]
    /// Children of `n` in the dominator tree; empty slice when out of range.
    pub fn children_of(&self, n: usize) -> &[usize] {
        self.children.get(n).map(|v| v.as_slice()).unwrap_or(&[])
    }
    #[allow(dead_code)]
    /// Depth of `n`; 0 for roots and out-of-range indices.
    pub fn depth_of(&self, n: usize) -> usize {
        self.depth.get(n).copied().unwrap_or(0)
    }
    #[allow(dead_code)]
    /// Lowest common ancestor in the dominator tree, found by repeatedly
    /// lifting the deeper node. The `unwrap_or` keeps a root (or invalid
    /// index) in place, and the `2 * n` bound guarantees termination;
    /// returns 0 if no common ancestor is found within the bound.
    pub fn lca(&self, mut a: usize, mut b: usize) -> usize {
        let n = self.idom.len();
        for _ in 0..(2 * n) {
            if a == b {
                return a;
            }
            if self.depth_of(a) > self.depth_of(b) {
                a = self.idom.get(a).and_then(|x| *x).unwrap_or(a);
            } else {
                b = self.idom.get(b).and_then(|x| *x).unwrap_or(b);
            }
        }
        0
    }
}
#[allow(dead_code)]
/// Pipeline position of an SRExt pass.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SRExtPassPhase {
    Early,
    Middle,
    Late,
    Finalize,
}
impl SRExtPassPhase {
    #[allow(dead_code)]
    /// Is this the `Early` phase?
    pub fn is_early(&self) -> bool {
        *self == Self::Early
    }
    #[allow(dead_code)]
    /// Is this the `Middle` phase?
    pub fn is_middle(&self) -> bool {
        *self == Self::Middle
    }
    #[allow(dead_code)]
    /// Is this the `Late` phase?
    pub fn is_late(&self) -> bool {
        *self == Self::Late
    }
    #[allow(dead_code)]
    /// Is this the `Finalize` phase?
    pub fn is_finalize(&self) -> bool {
        *self == Self::Finalize
    }
    #[allow(dead_code)]
    /// Numeric scheduling rank: phases run in ascending order.
    pub fn order(&self) -> u32 {
        match self {
            SRExtPassPhase::Early => 0,
            SRExtPassPhase::Middle => 1,
            SRExtPassPhase::Late => 2,
            SRExtPassPhase::Finalize => 3,
        }
    }
    #[allow(dead_code)]
    /// Inverse of `order`; `None` for ranks above 3.
    pub fn from_order(n: u32) -> Option<Self> {
        let phase = match n {
            0 => Self::Early,
            1 => Self::Middle,
            2 => Self::Late,
            3 => Self::Finalize,
            _ => return None,
        };
        Some(phase)
    }
}
#[allow(dead_code)]
/// Counters accumulated while an SRX2 pass runs.
#[derive(Debug, Clone, Default)]
pub struct SRX2PassStats {
    pub iterations: usize,
    pub changed: bool,
    pub nodes_visited: usize,
    pub nodes_modified: usize,
    pub time_ms: u64,
    pub memory_bytes: usize,
    pub errors: usize,
}
impl SRX2PassStats {
    #[allow(dead_code)]
    /// All counters zeroed.
    pub fn new() -> Self {
        SRX2PassStats::default()
    }
    #[allow(dead_code)]
    /// Counts one visited node.
    pub fn visit(&mut self) {
        self.nodes_visited += 1;
    }
    #[allow(dead_code)]
    /// Counts one modified node and latches the `changed` flag.
    pub fn modify(&mut self) {
        self.changed = true;
        self.nodes_modified += 1;
    }
    #[allow(dead_code)]
    /// Counts one pass iteration.
    pub fn iterate(&mut self) {
        self.iterations += 1;
    }
    #[allow(dead_code)]
    /// Counts one error.
    pub fn error(&mut self) {
        self.errors += 1;
    }
    #[allow(dead_code)]
    /// Modified-to-visited ratio; 0.0 when nothing was visited.
    pub fn efficiency(&self) -> f64 {
        match self.nodes_visited {
            0 => 0.0,
            n => self.nodes_modified as f64 / n as f64,
        }
    }
    #[allow(dead_code)]
    /// Folds another stats record into this one. Counters and time add up;
    /// `changed` ORs; memory keeps the peak of the two.
    pub fn merge(&mut self, o: &SRX2PassStats) {
        self.iterations += o.iterations;
        self.changed |= o.changed;
        self.nodes_visited += o.nodes_visited;
        self.nodes_modified += o.nodes_modified;
        self.time_ms += o.time_ms;
        self.memory_bytes = self.memory_bytes.max(o.memory_bytes);
        self.errors += o.errors;
    }
}
1802#[allow(dead_code)]
1804#[derive(Debug)]
1805pub struct SRExtCache {
1806 pub(super) entries: Vec<(u64, Vec<u8>, bool, u32)>,
1807 pub(super) cap: usize,
1808 pub(super) total_hits: u64,
1809 pub(super) total_misses: u64,
1810}
1811impl SRExtCache {
1812 #[allow(dead_code)]
1813 pub fn new(cap: usize) -> Self {
1814 Self {
1815 entries: Vec::new(),
1816 cap,
1817 total_hits: 0,
1818 total_misses: 0,
1819 }
1820 }
1821 #[allow(dead_code)]
1822 pub fn get(&mut self, key: u64) -> Option<&[u8]> {
1823 for e in self.entries.iter_mut() {
1824 if e.0 == key && e.2 {
1825 e.3 += 1;
1826 self.total_hits += 1;
1827 return Some(&e.1);
1828 }
1829 }
1830 self.total_misses += 1;
1831 None
1832 }
1833 #[allow(dead_code)]
1834 pub fn put(&mut self, key: u64, data: Vec<u8>) {
1835 if self.entries.len() >= self.cap {
1836 self.entries.retain(|e| e.2);
1837 if self.entries.len() >= self.cap {
1838 self.entries.remove(0);
1839 }
1840 }
1841 self.entries.push((key, data, true, 0));
1842 }
1843 #[allow(dead_code)]
1844 pub fn invalidate(&mut self) {
1845 for e in self.entries.iter_mut() {
1846 e.2 = false;
1847 }
1848 }
1849 #[allow(dead_code)]
1850 pub fn hit_rate(&self) -> f64 {
1851 let t = self.total_hits + self.total_misses;
1852 if t == 0 {
1853 0.0
1854 } else {
1855 self.total_hits as f64 / t as f64
1856 }
1857 }
1858 #[allow(dead_code)]
1859 pub fn live_count(&self) -> usize {
1860 self.entries.iter().filter(|e| e.2).count()
1861 }
1862}
#[allow(dead_code)]
/// Pipeline position of an SRX2 pass.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SRX2PassPhase {
    Early,
    Middle,
    Late,
    Finalize,
}
impl SRX2PassPhase {
    #[allow(dead_code)]
    /// Is this the `Early` phase?
    pub fn is_early(&self) -> bool {
        *self == Self::Early
    }
    #[allow(dead_code)]
    /// Is this the `Middle` phase?
    pub fn is_middle(&self) -> bool {
        *self == Self::Middle
    }
    #[allow(dead_code)]
    /// Is this the `Late` phase?
    pub fn is_late(&self) -> bool {
        *self == Self::Late
    }
    #[allow(dead_code)]
    /// Is this the `Finalize` phase?
    pub fn is_finalize(&self) -> bool {
        *self == Self::Finalize
    }
    #[allow(dead_code)]
    /// Numeric scheduling rank: phases run in ascending order.
    pub fn order(&self) -> u32 {
        match self {
            SRX2PassPhase::Early => 0,
            SRX2PassPhase::Middle => 1,
            SRX2PassPhase::Late => 2,
            SRX2PassPhase::Finalize => 3,
        }
    }
    #[allow(dead_code)]
    /// Inverse of `order`; `None` for ranks above 3.
    pub fn from_order(n: u32) -> Option<Self> {
        let phase = match n {
            0 => Self::Early,
            1 => Self::Middle,
            2 => Self::Late,
            3 => Self::Finalize,
            _ => return None,
        };
        Some(phase)
    }
}
1909#[allow(dead_code)]
1911#[derive(Debug, Default)]
1912pub struct SRX2PassRegistry {
1913 pub(super) configs: Vec<SRX2PassConfig>,
1914 pub(super) stats: Vec<SRX2PassStats>,
1915}
1916impl SRX2PassRegistry {
1917 #[allow(dead_code)]
1918 pub fn new() -> Self {
1919 Self::default()
1920 }
1921 #[allow(dead_code)]
1922 pub fn register(&mut self, c: SRX2PassConfig) {
1923 self.stats.push(SRX2PassStats::new());
1924 self.configs.push(c);
1925 }
1926 #[allow(dead_code)]
1927 pub fn len(&self) -> usize {
1928 self.configs.len()
1929 }
1930 #[allow(dead_code)]
1931 pub fn is_empty(&self) -> bool {
1932 self.configs.is_empty()
1933 }
1934 #[allow(dead_code)]
1935 pub fn get(&self, i: usize) -> Option<&SRX2PassConfig> {
1936 self.configs.get(i)
1937 }
1938 #[allow(dead_code)]
1939 pub fn get_stats(&self, i: usize) -> Option<&SRX2PassStats> {
1940 self.stats.get(i)
1941 }
1942 #[allow(dead_code)]
1943 pub fn enabled_passes(&self) -> Vec<&SRX2PassConfig> {
1944 self.configs.iter().filter(|c| c.enabled).collect()
1945 }
1946 #[allow(dead_code)]
1947 pub fn passes_in_phase(&self, ph: &SRX2PassPhase) -> Vec<&SRX2PassConfig> {
1948 self.configs
1949 .iter()
1950 .filter(|c| c.enabled && &c.phase == ph)
1951 .collect()
1952 }
1953 #[allow(dead_code)]
1954 pub fn total_nodes_visited(&self) -> usize {
1955 self.stats.iter().map(|s| s.nodes_visited).sum()
1956 }
1957 #[allow(dead_code)]
1958 pub fn any_changed(&self) -> bool {
1959 self.stats.iter().any(|s| s.changed)
1960 }
1961}
#[allow(dead_code)]
/// Run statistics for one OS pass.
#[derive(Debug, Clone, Default)]
pub struct OSPassStats {
    pub total_runs: u32,
    pub successful_runs: u32,
    pub total_changes: u64,
    pub time_ms: u64,
    pub iterations_used: u32,
}
impl OSPassStats {
    #[allow(dead_code)]
    /// All counters zeroed.
    pub fn new() -> Self {
        Self::default()
    }
    #[allow(dead_code)]
    /// Records a *successful* run: `changes` and `time_ms` accumulate;
    /// `iterations_used` keeps only the most recent run's figure.
    pub fn record_run(&mut self, changes: u64, time_ms: u64, iterations: u32) {
        self.total_runs += 1;
        self.successful_runs += 1;
        self.total_changes += changes;
        self.time_ms += time_ms;
        self.iterations_used = iterations;
    }
    #[allow(dead_code)]
    /// Records a run that failed or was aborted: it counts toward
    /// `total_runs` and the time budget but not `successful_runs`.
    ///
    /// Previously every run was recorded via `record_run`, which made
    /// `success_rate` a constant 1.0 and `successful_runs` a duplicate of
    /// `total_runs`.
    pub fn record_failed_run(&mut self, time_ms: u64) {
        self.total_runs += 1;
        self.time_ms += time_ms;
    }
    #[allow(dead_code)]
    /// Mean changes per run; 0.0 before any run.
    pub fn average_changes_per_run(&self) -> f64 {
        if self.total_runs == 0 {
            return 0.0;
        }
        self.total_changes as f64 / self.total_runs as f64
    }
    #[allow(dead_code)]
    /// Fraction of runs that succeeded; 0.0 before any run.
    pub fn success_rate(&self) -> f64 {
        if self.total_runs == 0 {
            return 0.0;
        }
        self.successful_runs as f64 / self.total_runs as f64
    }
    #[allow(dead_code)]
    /// One-line human-readable summary.
    pub fn format_summary(&self) -> String {
        format!(
            "Runs: {}/{}, Changes: {}, Time: {}ms",
            self.successful_runs, self.total_runs, self.total_changes, self.time_ms
        )
    }
}
#[allow(dead_code)]
/// Lifecycle phase of an OS pass.
#[derive(Debug, Clone, PartialEq)]
pub enum OSPassPhase {
    Analysis,
    Transformation,
    Verification,
    Cleanup,
}
impl OSPassPhase {
    #[allow(dead_code)]
    /// Lowercase display name of the phase.
    pub fn name(&self) -> &str {
        match self {
            Self::Analysis => "analysis",
            Self::Transformation => "transformation",
            Self::Verification => "verification",
            Self::Cleanup => "cleanup",
        }
    }
    #[allow(dead_code)]
    /// True for phases that are allowed to mutate the program.
    pub fn is_modifying(&self) -> bool {
        match self {
            Self::Transformation | Self::Cleanup => true,
            Self::Analysis | Self::Verification => false,
        }
    }
}
2029#[allow(dead_code)]
2031#[derive(Debug, Clone, Default)]
2032pub struct SRX2ConstFolder {
2033 pub(super) folds: usize,
2034 pub(super) failures: usize,
2035 pub(super) enabled: bool,
2036}
2037impl SRX2ConstFolder {
2038 #[allow(dead_code)]
2039 pub fn new() -> Self {
2040 Self {
2041 folds: 0,
2042 failures: 0,
2043 enabled: true,
2044 }
2045 }
2046 #[allow(dead_code)]
2047 pub fn add_i64(&mut self, a: i64, b: i64) -> Option<i64> {
2048 self.folds += 1;
2049 a.checked_add(b)
2050 }
2051 #[allow(dead_code)]
2052 pub fn sub_i64(&mut self, a: i64, b: i64) -> Option<i64> {
2053 self.folds += 1;
2054 a.checked_sub(b)
2055 }
2056 #[allow(dead_code)]
2057 pub fn mul_i64(&mut self, a: i64, b: i64) -> Option<i64> {
2058 self.folds += 1;
2059 a.checked_mul(b)
2060 }
2061 #[allow(dead_code)]
2062 pub fn div_i64(&mut self, a: i64, b: i64) -> Option<i64> {
2063 if b == 0 {
2064 self.failures += 1;
2065 None
2066 } else {
2067 self.folds += 1;
2068 a.checked_div(b)
2069 }
2070 }
2071 #[allow(dead_code)]
2072 pub fn rem_i64(&mut self, a: i64, b: i64) -> Option<i64> {
2073 if b == 0 {
2074 self.failures += 1;
2075 None
2076 } else {
2077 self.folds += 1;
2078 a.checked_rem(b)
2079 }
2080 }
2081 #[allow(dead_code)]
2082 pub fn neg_i64(&mut self, a: i64) -> Option<i64> {
2083 self.folds += 1;
2084 a.checked_neg()
2085 }
2086 #[allow(dead_code)]
2087 pub fn shl_i64(&mut self, a: i64, s: u32) -> Option<i64> {
2088 if s >= 64 {
2089 self.failures += 1;
2090 None
2091 } else {
2092 self.folds += 1;
2093 a.checked_shl(s)
2094 }
2095 }
2096 #[allow(dead_code)]
2097 pub fn shr_i64(&mut self, a: i64, s: u32) -> Option<i64> {
2098 if s >= 64 {
2099 self.failures += 1;
2100 None
2101 } else {
2102 self.folds += 1;
2103 a.checked_shr(s)
2104 }
2105 }
2106 #[allow(dead_code)]
2107 pub fn and_i64(&mut self, a: i64, b: i64) -> i64 {
2108 self.folds += 1;
2109 a & b
2110 }
2111 #[allow(dead_code)]
2112 pub fn or_i64(&mut self, a: i64, b: i64) -> i64 {
2113 self.folds += 1;
2114 a | b
2115 }
2116 #[allow(dead_code)]
2117 pub fn xor_i64(&mut self, a: i64, b: i64) -> i64 {
2118 self.folds += 1;
2119 a ^ b
2120 }
2121 #[allow(dead_code)]
2122 pub fn not_i64(&mut self, a: i64) -> i64 {
2123 self.folds += 1;
2124 !a
2125 }
2126 #[allow(dead_code)]
2127 pub fn fold_count(&self) -> usize {
2128 self.folds
2129 }
2130 #[allow(dead_code)]
2131 pub fn failure_count(&self) -> usize {
2132 self.failures
2133 }
2134 #[allow(dead_code)]
2135 pub fn enable(&mut self) {
2136 self.enabled = true;
2137 }
2138 #[allow(dead_code)]
2139 pub fn disable(&mut self) {
2140 self.enabled = false;
2141 }
2142 #[allow(dead_code)]
2143 pub fn is_enabled(&self) -> bool {
2144 self.enabled
2145 }
2146}
2147#[allow(dead_code)]
2149#[derive(Debug, Clone)]
2150pub struct SRX2Worklist {
2151 pub(super) items: std::collections::VecDeque<usize>,
2152 pub(super) present: Vec<bool>,
2153}
2154impl SRX2Worklist {
2155 #[allow(dead_code)]
2156 pub fn new(capacity: usize) -> Self {
2157 Self {
2158 items: std::collections::VecDeque::new(),
2159 present: vec![false; capacity],
2160 }
2161 }
2162 #[allow(dead_code)]
2163 pub fn push(&mut self, id: usize) {
2164 if id < self.present.len() && !self.present[id] {
2165 self.present[id] = true;
2166 self.items.push_back(id);
2167 }
2168 }
2169 #[allow(dead_code)]
2170 pub fn push_front(&mut self, id: usize) {
2171 if id < self.present.len() && !self.present[id] {
2172 self.present[id] = true;
2173 self.items.push_front(id);
2174 }
2175 }
2176 #[allow(dead_code)]
2177 pub fn pop(&mut self) -> Option<usize> {
2178 let id = self.items.pop_front()?;
2179 if id < self.present.len() {
2180 self.present[id] = false;
2181 }
2182 Some(id)
2183 }
2184 #[allow(dead_code)]
2185 pub fn is_empty(&self) -> bool {
2186 self.items.is_empty()
2187 }
2188 #[allow(dead_code)]
2189 pub fn len(&self) -> usize {
2190 self.items.len()
2191 }
2192 #[allow(dead_code)]
2193 pub fn contains(&self, id: usize) -> bool {
2194 id < self.present.len() && self.present[id]
2195 }
2196 #[allow(dead_code)]
2197 pub fn drain_all(&mut self) -> Vec<usize> {
2198 let v: Vec<usize> = self.items.drain(..).collect();
2199 for &id in &v {
2200 if id < self.present.len() {
2201 self.present[id] = false;
2202 }
2203 }
2204 v
2205 }
2206}
2207#[derive(Clone, Debug, PartialEq)]
2212pub struct LinearFunction {
2213 pub a: i64,
2215 pub b: i64,
2217 pub iv: LcnfVarId,
2219}
2220impl LinearFunction {
2221 pub fn new(iv: LcnfVarId, a: i64, b: i64) -> Self {
2222 LinearFunction { a, b, iv }
2223 }
2224 pub fn eval(&self, iv_val: i64) -> i64 {
2226 self.a * iv_val + self.b
2227 }
2228 pub fn is_identity(&self) -> bool {
2230 self.a == 1 && self.b == 0
2231 }
2232}
#[allow(dead_code)]
#[derive(Debug, Clone)]
/// A single cached artifact in the OS pass pipeline's cache.
pub struct OSCacheEntry {
    /// Lookup key identifying the artifact.
    pub key: String,
    /// Raw cached payload bytes.
    pub data: Vec<u8>,
    /// Creation/update time — unit (seconds vs. milliseconds) is not
    /// established in this chunk; confirm at the producer.
    pub timestamp: u64,
    /// `false` once the entry has been invalidated.
    pub valid: bool,
}
2241#[allow(dead_code)]
2242#[derive(Debug, Clone)]
2243pub struct OSPassConfig {
2244 pub phase: OSPassPhase,
2245 pub enabled: bool,
2246 pub max_iterations: u32,
2247 pub debug_output: bool,
2248 pub pass_name: String,
2249}
2250impl OSPassConfig {
2251 #[allow(dead_code)]
2252 pub fn new(name: impl Into<String>, phase: OSPassPhase) -> Self {
2253 OSPassConfig {
2254 phase,
2255 enabled: true,
2256 max_iterations: 10,
2257 debug_output: false,
2258 pass_name: name.into(),
2259 }
2260 }
2261 #[allow(dead_code)]
2262 pub fn disabled(mut self) -> Self {
2263 self.enabled = false;
2264 self
2265 }
2266 #[allow(dead_code)]
2267 pub fn with_debug(mut self) -> Self {
2268 self.debug_output = true;
2269 self
2270 }
2271 #[allow(dead_code)]
2272 pub fn max_iter(mut self, n: u32) -> Self {
2273 self.max_iterations = n;
2274 self
2275 }
2276}
2277#[allow(dead_code)]
2278#[derive(Debug, Clone)]
2279pub struct OSWorklist {
2280 pub(super) items: std::collections::VecDeque<u32>,
2281 pub(super) in_worklist: std::collections::HashSet<u32>,
2282}
2283impl OSWorklist {
2284 #[allow(dead_code)]
2285 pub fn new() -> Self {
2286 OSWorklist {
2287 items: std::collections::VecDeque::new(),
2288 in_worklist: std::collections::HashSet::new(),
2289 }
2290 }
2291 #[allow(dead_code)]
2292 pub fn push(&mut self, item: u32) -> bool {
2293 if self.in_worklist.insert(item) {
2294 self.items.push_back(item);
2295 true
2296 } else {
2297 false
2298 }
2299 }
2300 #[allow(dead_code)]
2301 pub fn pop(&mut self) -> Option<u32> {
2302 let item = self.items.pop_front()?;
2303 self.in_worklist.remove(&item);
2304 Some(item)
2305 }
2306 #[allow(dead_code)]
2307 pub fn is_empty(&self) -> bool {
2308 self.items.is_empty()
2309 }
2310 #[allow(dead_code)]
2311 pub fn len(&self) -> usize {
2312 self.items.len()
2313 }
2314 #[allow(dead_code)]
2315 pub fn contains(&self, item: u32) -> bool {
2316 self.in_worklist.contains(&item)
2317 }
2318}
#[derive(Clone, Debug)]
/// An induction-variable candidate: `var` is assumed to start at `base` and
/// change by `step` each iteration (inferred from the field names — confirm
/// against the detection pass that builds these).
pub struct InductionVariable {
    /// The LCNF variable carrying the induction value.
    pub var: LcnfVarId,
    /// Initial value on loop entry.
    pub base: LcnfArg,
    /// Constant amount added per iteration.
    pub step: i64,
    /// Label for this variable — presumably used for diagnostics; usage not
    /// visible in this chunk.
    pub name: String,
}
impl InductionVariable {
    /// Bundles the four components; performs no validation.
    pub fn new(var: LcnfVarId, base: LcnfArg, step: i64, name: String) -> Self {
        InductionVariable {
            var,
            base,
            step,
            name,
        }
    }
}
2345#[allow(dead_code)]
2347#[derive(Debug, Clone)]
2348pub struct SRExtWorklist {
2349 pub(super) items: std::collections::VecDeque<usize>,
2350 pub(super) present: Vec<bool>,
2351}
2352impl SRExtWorklist {
2353 #[allow(dead_code)]
2354 pub fn new(capacity: usize) -> Self {
2355 Self {
2356 items: std::collections::VecDeque::new(),
2357 present: vec![false; capacity],
2358 }
2359 }
2360 #[allow(dead_code)]
2361 pub fn push(&mut self, id: usize) {
2362 if id < self.present.len() && !self.present[id] {
2363 self.present[id] = true;
2364 self.items.push_back(id);
2365 }
2366 }
2367 #[allow(dead_code)]
2368 pub fn push_front(&mut self, id: usize) {
2369 if id < self.present.len() && !self.present[id] {
2370 self.present[id] = true;
2371 self.items.push_front(id);
2372 }
2373 }
2374 #[allow(dead_code)]
2375 pub fn pop(&mut self) -> Option<usize> {
2376 let id = self.items.pop_front()?;
2377 if id < self.present.len() {
2378 self.present[id] = false;
2379 }
2380 Some(id)
2381 }
2382 #[allow(dead_code)]
2383 pub fn is_empty(&self) -> bool {
2384 self.items.is_empty()
2385 }
2386 #[allow(dead_code)]
2387 pub fn len(&self) -> usize {
2388 self.items.len()
2389 }
2390 #[allow(dead_code)]
2391 pub fn contains(&self, id: usize) -> bool {
2392 id < self.present.len() && self.present[id]
2393 }
2394 #[allow(dead_code)]
2395 pub fn drain_all(&mut self) -> Vec<usize> {
2396 let v: Vec<usize> = self.items.drain(..).collect();
2397 for &id in &v {
2398 if id < self.present.len() {
2399 self.present[id] = false;
2400 }
2401 }
2402 v
2403 }
2404}
2405#[allow(dead_code)]
2406pub struct OSPassRegistry {
2407 pub(super) configs: Vec<OSPassConfig>,
2408 pub(super) stats: std::collections::HashMap<String, OSPassStats>,
2409}
2410impl OSPassRegistry {
2411 #[allow(dead_code)]
2412 pub fn new() -> Self {
2413 OSPassRegistry {
2414 configs: Vec::new(),
2415 stats: std::collections::HashMap::new(),
2416 }
2417 }
2418 #[allow(dead_code)]
2419 pub fn register(&mut self, config: OSPassConfig) {
2420 self.stats
2421 .insert(config.pass_name.clone(), OSPassStats::new());
2422 self.configs.push(config);
2423 }
2424 #[allow(dead_code)]
2425 pub fn enabled_passes(&self) -> Vec<&OSPassConfig> {
2426 self.configs.iter().filter(|c| c.enabled).collect()
2427 }
2428 #[allow(dead_code)]
2429 pub fn get_stats(&self, name: &str) -> Option<&OSPassStats> {
2430 self.stats.get(name)
2431 }
2432 #[allow(dead_code)]
2433 pub fn total_passes(&self) -> usize {
2434 self.configs.len()
2435 }
2436 #[allow(dead_code)]
2437 pub fn enabled_count(&self) -> usize {
2438 self.enabled_passes().len()
2439 }
2440 #[allow(dead_code)]
2441 pub fn update_stats(&mut self, name: &str, changes: u64, time_ms: u64, iter: u32) {
2442 if let Some(stats) = self.stats.get_mut(name) {
2443 stats.record_run(changes, time_ms, iter);
2444 }
2445 }
2446}