1use crate::lcnf::*;
6use std::collections::{HashMap, HashSet};
7
8use super::functions::*;
9use std::collections::VecDeque;
10
/// Counters accumulated over one run of the specialization pass.
#[derive(Debug, Clone, Default)]
pub struct SpecializationStats {
    /// Specializations keyed (at least partly) on a concrete type argument.
    pub type_specializations: usize,
    /// Specializations keyed on at least one known constant argument.
    pub const_specializations: usize,
    /// Specializations keyed on at least one known closure/function argument.
    pub closure_specializations: usize,
    /// Call sites rewritten to target a specialized copy.
    pub call_sites_redirected: usize,
    /// Total instruction growth from generated code.
    /// NOTE(review): not updated anywhere in the code visible here — confirm
    /// it is maintained elsewhere (the cache tracks `total_growth` itself).
    pub total_code_growth: usize,
    /// Candidates dropped because the growth budget or size threshold was hit.
    pub skipped_budget: usize,
    /// Candidates dropped because the per-callee specialization cap was hit.
    pub skipped_limit: usize,
    /// Functions scanned for specialization opportunities.
    pub functions_analyzed: usize,
}
31#[allow(dead_code)]
32pub struct SpecPassRegistry {
33 pub(super) configs: Vec<SpecPassConfig>,
34 pub(super) stats: std::collections::HashMap<String, SpecPassStats>,
35}
36impl SpecPassRegistry {
37 #[allow(dead_code)]
38 pub fn new() -> Self {
39 SpecPassRegistry {
40 configs: Vec::new(),
41 stats: std::collections::HashMap::new(),
42 }
43 }
44 #[allow(dead_code)]
45 pub fn register(&mut self, config: SpecPassConfig) {
46 self.stats
47 .insert(config.pass_name.clone(), SpecPassStats::new());
48 self.configs.push(config);
49 }
50 #[allow(dead_code)]
51 pub fn enabled_passes(&self) -> Vec<&SpecPassConfig> {
52 self.configs.iter().filter(|c| c.enabled).collect()
53 }
54 #[allow(dead_code)]
55 pub fn get_stats(&self, name: &str) -> Option<&SpecPassStats> {
56 self.stats.get(name)
57 }
58 #[allow(dead_code)]
59 pub fn total_passes(&self) -> usize {
60 self.configs.len()
61 }
62 #[allow(dead_code)]
63 pub fn enabled_count(&self) -> usize {
64 self.enabled_passes().len()
65 }
66 #[allow(dead_code)]
67 pub fn update_stats(&mut self, name: &str, changes: u64, time_ms: u64, iter: u32) {
68 if let Some(stats) = self.stats.get_mut(name) {
69 stats.record_run(changes, time_ms, iter);
70 }
71 }
72}
/// Tuning knobs for the specialization pass.
#[derive(Debug, Clone)]
pub struct SpecializationConfig {
    /// Maximum number of specializations generated per callee.
    pub max_specializations: usize,
    /// Enable specializing on known closure arguments.
    /// NOTE(review): not consulted in the code visible here — confirm.
    pub specialize_closures: bool,
    /// Enable specializing on numeric constants (not consulted here — confirm).
    pub specialize_numerics: bool,
    /// Callees larger than this instruction count are never specialized.
    pub size_threshold: usize,
    /// Module growth budget as a multiple of the original total size.
    pub growth_factor: f64,
    /// Allow specializing recursive functions.
    pub allow_recursive: bool,
    /// Enable specializing on concrete type arguments (not consulted here — confirm).
    pub specialize_type_params: bool,
    /// Depth cap for recursive specialization (not consulted here — confirm).
    pub max_recursive_depth: usize,
}
/// A freshly generated specialized copy of a function, plus bookkeeping.
#[derive(Debug, Clone)]
pub struct SpecializedDecl {
    /// The cache key this specialization was generated for.
    pub key: SpecializationKey,
    /// The specialized function declaration itself.
    pub decl: LcnfFunDecl,
    /// Instruction count of the specialized body.
    pub code_growth: usize,
    /// Whether the original function was recursive.
    pub from_recursive: bool,
}
/// One serialized cache record: keyed payload with a validity flag.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct SpecCacheEntry {
    /// Lookup key.
    pub key: String,
    /// Serialized payload (format not visible here — TODO confirm).
    pub data: Vec<u8>,
    /// Creation/update time (units not visible here — presumably epoch-based).
    pub timestamp: u64,
    /// False once the entry has been invalidated.
    pub valid: bool,
}
/// What is known about a closure-typed argument at a candidate call site.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SpecClosureArg {
    /// Name of the function the argument is known to refer to, if any.
    pub known_fn: Option<String>,
    /// Position of the argument in the call's argument list.
    pub param_idx: usize,
}
/// Function-specialization pass over an LCNF module: clones functions for
/// call sites with known constant/type/closure arguments and redirects
/// those calls to the clones.
pub struct SpecializationPass {
    /// Tuning knobs (budget, caps, thresholds).
    pub(super) config: SpecializationConfig,
    /// Key -> specialized-name cache; also tracks accumulated code growth.
    pub(super) cache: SpecializationCache,
    /// Counters reported via `stats()`.
    pub(super) stats: SpecializationStats,
    /// Next value handed out by `fresh_id`.
    pub(super) next_id: u64,
    /// Known mapping from variables to the function they reference.
    pub(super) var_to_fn: HashMap<LcnfVarId, String>,
    /// Instruction counts of each function before specialization.
    pub(super) original_sizes: HashMap<String, usize>,
}
impl SpecializationPass {
    /// Creates a pass with the given config, an empty cache, zeroed stats,
    /// and a fresh-id counter starting at 5000 (assumption: caller-produced
    /// LcnfVarIds stay below 5000 — TODO confirm).
    pub fn new(config: SpecializationConfig) -> Self {
        SpecializationPass {
            config,
            cache: SpecializationCache::new(),
            stats: SpecializationStats::default(),
            next_id: 5000,
            var_to_fn: HashMap::new(),
            original_sizes: HashMap::new(),
        }
    }
    /// Read-only view of the counters accumulated so far.
    pub fn stats(&self) -> &SpecializationStats {
        &self.stats
    }
    /// Hands out the next unused variable id, post-incrementing the counter.
    pub(super) fn fresh_id(&mut self) -> LcnfVarId {
        let id = self.next_id;
        self.next_id += 1;
        LcnfVarId(id)
    }
    /// Runs the whole pass: measures original sizes, collects candidate call
    /// sites, generates specializations under the growth budget, then
    /// redirects eligible calls to the new copies.
    pub(super) fn run(&mut self, module: &mut LcnfModule) {
        // Record each function's original size; their sum bounds total growth.
        for decl in &module.fun_decls {
            let size = count_instructions(&decl.body);
            self.original_sizes.insert(decl.name.clone(), size);
        }
        let decl_names: HashSet<String> = module.fun_decls.iter().map(|d| d.name.clone()).collect();
        // Phase 1: collect candidate call sites per caller.
        let mut all_sites: Vec<(String, Vec<SpecCallSite>)> = Vec::new();
        for decl in &module.fun_decls {
            self.stats.functions_analyzed += 1;
            let known_consts: HashMap<LcnfVarId, LcnfLit> = HashMap::new();
            let sites =
                find_specialization_sites(&decl.body, &known_consts, &self.var_to_fn, &decl_names);
            if !sites.is_empty() {
                all_sites.push((decl.name.clone(), sites));
            }
        }
        // Phase 2: create specializations while the growth budget allows.
        let mut new_decls: Vec<LcnfFunDecl> = Vec::new();
        let total_original_size: usize = self.original_sizes.values().sum();
        let budget = (total_original_size as f64 * self.config.growth_factor) as usize;
        for (_caller, sites) in &all_sites {
            for site in sites {
                if self.cache.total_growth >= budget {
                    self.stats.skipped_budget += 1;
                    continue;
                }
                // Per-callee cap on how many variants may exist.
                if self.cache.specialization_count(&site.callee) >= self.config.max_specializations
                {
                    self.stats.skipped_limit += 1;
                    continue;
                }
                let key = self.build_key(site);
                // A key with no useful facts would reproduce the original verbatim.
                if key.is_trivial() {
                    continue;
                }
                // Already specialized for this exact key: nothing to do.
                if self.cache.lookup(&key).is_some() {
                    continue;
                }
                if let Some(original) = module.fun_decls.iter().find(|d| d.name == site.callee) {
                    let original_size = count_instructions(&original.body);
                    if original_size > self.config.size_threshold {
                        self.stats.skipped_budget += 1;
                        continue;
                    }
                    if let Some(spec_decl) = self.create_specialization(original, &key) {
                        let growth = count_instructions(&spec_decl.decl.body);
                        self.cache
                            .insert(key.clone(), spec_decl.decl.name.clone(), growth);
                        self.update_stats(&key);
                        new_decls.push(spec_decl.decl);
                    }
                }
            }
        }
        module.fun_decls.extend(new_decls);
        // Phase 3: repoint eligible call sites at the new specializations.
        self.redirect_call_sites(module);
    }
    /// Builds the cache key identifying one (callee, argument-facts) combo.
    pub(super) fn build_key(&self, site: &SpecCallSite) -> SpecializationKey {
        // NOTE(review): both branches produce equal values; the special case
        // only avoids cloning an already-empty Vec.
        let type_args = if site.type_args.is_empty() {
            vec![]
        } else {
            site.type_args.clone()
        };
        SpecializationKey {
            original: site.callee.clone(),
            type_args,
            const_args: site.const_args.clone(),
            closure_args: site.closure_args.clone(),
        }
    }
    /// Clones `original` under `key`: substitutes known constant arguments
    /// into the body, erases the now-dead parameters, pins concrete type
    /// arguments, and returns the new declaration. `None` when the function
    /// is recursive and recursion is disallowed by config.
    pub(super) fn create_specialization(
        &mut self,
        original: &LcnfFunDecl,
        key: &SpecializationKey,
    ) -> Option<SpecializedDecl> {
        if !self.config.allow_recursive && original.is_recursive {
            return None;
        }
        let spec_name = key.mangled_name();
        let mut spec_body = original.body.clone();
        let mut spec_params = original.params.clone();
        // Inline each known constant argument and mark its parameter erased.
        for (i, ca) in key.const_args.iter().enumerate() {
            match ca {
                SpecConstArg::Nat(n) => {
                    if i < spec_params.len() {
                        let param_id = spec_params[i].id;
                        self.substitute_constant(&mut spec_body, param_id, &LcnfLit::Nat(*n));
                        spec_params[i].erased = true;
                    }
                }
                SpecConstArg::Str(s) => {
                    if i < spec_params.len() {
                        let param_id = spec_params[i].id;
                        self.substitute_constant(
                            &mut spec_body,
                            param_id,
                            &LcnfLit::Str(s.clone()),
                        );
                        spec_params[i].erased = true;
                    }
                }
                SpecConstArg::Unknown => {}
            }
        }
        // Pin concrete type arguments onto the matching parameters.
        for (i, ta) in key.type_args.iter().enumerate() {
            if let SpecTypeArg::Concrete(ty) = ta {
                if i < spec_params.len() {
                    spec_params[i].ty = ty.clone();
                }
            }
        }
        // Drop erased parameters from the specialized signature.
        let active_params: Vec<LcnfParam> = spec_params.into_iter().filter(|p| !p.erased).collect();
        let spec_decl = LcnfFunDecl {
            name: spec_name.clone(),
            original_name: original.original_name.clone(),
            params: active_params,
            ret_type: original.ret_type.clone(),
            body: spec_body,
            is_recursive: original.is_recursive,
            is_lifted: true,
            inline_cost: original.inline_cost,
        };
        let code_growth = count_instructions(&spec_decl.body);
        Some(SpecializedDecl {
            key: key.clone(),
            decl: spec_decl,
            code_growth,
            from_recursive: original.is_recursive,
        })
    }
    /// Replaces every non-shadowed use of `var` in `expr` with `lit`.
    pub(super) fn substitute_constant(&self, expr: &mut LcnfExpr, var: LcnfVarId, lit: &LcnfLit) {
        match expr {
            LcnfExpr::Let {
                id, value, body, ..
            } => {
                self.substitute_constant_in_value(value, var, lit);
                // A rebinding of the same id shadows `var` below this point.
                if *id != var {
                    self.substitute_constant(body, var, lit);
                }
            }
            LcnfExpr::Case {
                scrutinee,
                alts,
                default,
                ..
            } => {
                // NOTE(review): appears to be a deliberate no-op — a scrutinee
                // slot holds a variable, not a literal, so nothing can be
                // substituted here — confirm intent.
                if *scrutinee == var {}
                for alt in alts.iter_mut() {
                    // Alternative parameters shadow `var` within that arm.
                    let shadows = alt.params.iter().any(|p| p.id == var);
                    if !shadows {
                        self.substitute_constant(&mut alt.body, var, lit);
                    }
                }
                if let Some(def) = default {
                    self.substitute_constant(def, var, lit);
                }
            }
            LcnfExpr::Return(arg) => {
                self.substitute_arg(arg, var, lit);
            }
            LcnfExpr::TailCall(func, args) => {
                self.substitute_arg(func, var, lit);
                for a in args.iter_mut() {
                    self.substitute_arg(a, var, lit);
                }
            }
            LcnfExpr::Unreachable => {}
        }
    }
    /// Substitutes `lit` for `var` inside one let-bound value.
    pub(super) fn substitute_constant_in_value(
        &self,
        value: &mut LcnfLetValue,
        var: LcnfVarId,
        lit: &LcnfLit,
    ) {
        match value {
            LcnfLetValue::App(func, args) => {
                self.substitute_arg(func, var, lit);
                for a in args.iter_mut() {
                    self.substitute_arg(a, var, lit);
                }
            }
            // NOTE(review): no-op — a projection base presumably must remain a
            // variable, so a literal cannot be substituted in — confirm.
            LcnfLetValue::Proj(_, _, obj) => if *obj == var {},
            LcnfLetValue::Ctor(_, _, args) => {
                for a in args.iter_mut() {
                    self.substitute_arg(a, var, lit);
                }
            }
            LcnfLetValue::FVar(v) => {
                if *v == var {
                    *value = LcnfLetValue::Lit(lit.clone());
                }
            }
            LcnfLetValue::Lit(_)
            | LcnfLetValue::Erased
            | LcnfLetValue::Reset(_)
            | LcnfLetValue::Reuse(_, _, _, _) => {}
        }
    }
    /// Rewrites a single argument slot from `var` to `lit`, if it matches.
    pub(super) fn substitute_arg(&self, arg: &mut LcnfArg, var: LcnfVarId, lit: &LcnfLit) {
        if let LcnfArg::Var(v) = arg {
            if *v == var {
                *arg = LcnfArg::Lit(lit.clone());
            }
        }
    }
    /// Classifies the key's kind(s) and bumps the matching stat counters.
    /// A single key can count toward several categories at once.
    pub(super) fn update_stats(&mut self, key: &SpecializationKey) {
        let has_type = key
            .type_args
            .iter()
            .any(|a| matches!(a, SpecTypeArg::Concrete(_)));
        let has_const = key
            .const_args
            .iter()
            .any(|a| !matches!(a, SpecConstArg::Unknown));
        let has_closure = key.closure_args.iter().any(|a| a.known_fn.is_some());
        if has_type {
            self.stats.type_specializations += 1;
        }
        if has_const {
            self.stats.const_specializations += 1;
        }
        if has_closure {
            self.stats.closure_specializations += 1;
        }
    }
    /// Rewrites calls across the whole module to target cached specializations.
    pub(super) fn redirect_call_sites(&mut self, module: &mut LcnfModule) {
        let redirects: HashMap<SpecializationKey, String> = self.cache.entries.clone();
        if redirects.is_empty() {
            return;
        }
        for decl in &mut module.fun_decls {
            let redirected = self.redirect_in_expr(&mut decl.body, &redirects);
            self.stats.call_sites_redirected += redirected;
        }
    }
    /// Redirects calls inside one function body; returns how many changed.
    pub(super) fn redirect_in_expr(
        &self,
        expr: &mut LcnfExpr,
        redirects: &HashMap<SpecializationKey, String>,
    ) -> usize {
        let mut count = 0;
        let mut local_consts: HashMap<LcnfVarId, LcnfLit> = HashMap::new();
        // Seed local knowledge with the globally known var -> function map.
        let mut local_fn_map: HashMap<LcnfVarId, String> = self.var_to_fn.clone();
        self.redirect_inner(
            expr,
            redirects,
            &mut local_consts,
            &mut local_fn_map,
            &mut count,
        );
        count
    }
    /// Walks `expr`, tracking per-scope constant and function bindings, and
    /// repoints App/TailCall targets whose argument facts match a redirect key.
    ///
    /// NOTE(review): a redirect only fires when some in-scope variable already
    /// maps to the specialized function's name; freshly generated
    /// specializations may have no such binding yet — confirm `var_to_fn` is
    /// populated for them elsewhere, otherwise redirection is a no-op.
    pub(super) fn redirect_inner(
        &self,
        expr: &mut LcnfExpr,
        redirects: &HashMap<SpecializationKey, String>,
        local_consts: &mut HashMap<LcnfVarId, LcnfLit>,
        local_fn_map: &mut HashMap<LcnfVarId, String>,
        count: &mut usize,
    ) {
        match expr {
            LcnfExpr::Let {
                id, value, body, ..
            } => {
                if let LcnfLetValue::App(func, args) = value {
                    if let LcnfArg::Var(v) = func {
                        if let Some(fn_name) = local_fn_map.get(v).cloned() {
                            let const_args = Self::build_const_args(args, local_consts);
                            let closure_args = Self::build_closure_args(args, local_fn_map);
                            for (key, spec_name) in redirects {
                                if key.original == fn_name
                                    && key.const_args == const_args
                                    && (key.closure_args.is_empty()
                                        || key.closure_args == closure_args)
                                {
                                    if let Some(spec_var) = local_fn_map
                                        .iter()
                                        .find(|(_, name)| *name == spec_name)
                                        .map(|(var, _)| *var)
                                    {
                                        *func = LcnfArg::Var(spec_var);
                                        *count += 1;
                                    }
                                    // Stop at the first matching key, whether
                                    // or not the rewrite actually happened.
                                    break;
                                }
                            }
                        }
                    }
                }
                // Record facts introduced by this binding for the body walk.
                if let LcnfLetValue::Lit(lit) = value {
                    local_consts.insert(*id, lit.clone());
                }
                if let LcnfLetValue::FVar(v) = value {
                    if let Some(fn_name) = local_fn_map.get(v).cloned() {
                        local_fn_map.insert(*id, fn_name);
                    }
                }
                self.redirect_inner(body, redirects, local_consts, local_fn_map, count);
            }
            LcnfExpr::Case { alts, default, .. } => {
                // Each branch gets its own copies so facts don't leak across arms.
                for alt in alts.iter_mut() {
                    let mut alt_consts = local_consts.clone();
                    let mut alt_fn_map = local_fn_map.clone();
                    self.redirect_inner(
                        &mut alt.body,
                        redirects,
                        &mut alt_consts,
                        &mut alt_fn_map,
                        count,
                    );
                }
                if let Some(def) = default {
                    let mut def_consts = local_consts.clone();
                    let mut def_fn_map = local_fn_map.clone();
                    self.redirect_inner(def, redirects, &mut def_consts, &mut def_fn_map, count);
                }
            }
            LcnfExpr::TailCall(func, args) => {
                if let LcnfArg::Var(v) = func {
                    if let Some(fn_name) = local_fn_map.get(v).cloned() {
                        let const_args = Self::build_const_args(args, local_consts);
                        let closure_args = Self::build_closure_args(args, local_fn_map);
                        for (key, spec_name) in redirects {
                            if key.original == fn_name
                                && key.const_args == const_args
                                && (key.closure_args.is_empty() || key.closure_args == closure_args)
                            {
                                if let Some(spec_var) = local_fn_map
                                    .iter()
                                    .find(|(_, name)| *name == spec_name)
                                    .map(|(var, _)| *var)
                                {
                                    *func = LcnfArg::Var(spec_var);
                                    *count += 1;
                                }
                                break;
                            }
                        }
                    }
                }
            }
            LcnfExpr::Return(_) | LcnfExpr::Unreachable => {}
        }
    }
    /// Maps each call argument to what is known about it as a constant.
    pub(super) fn build_const_args(
        args: &[LcnfArg],
        local_consts: &HashMap<LcnfVarId, LcnfLit>,
    ) -> Vec<SpecConstArg> {
        args.iter()
            .map(|arg| match arg {
                LcnfArg::Lit(LcnfLit::Nat(n)) => SpecConstArg::Nat(*n),
                LcnfArg::Lit(LcnfLit::Str(s)) => SpecConstArg::Str(s.clone()),
                // The inner match compiling without a catch-all implies LcnfLit
                // currently has only Nat/Str variants.
                LcnfArg::Var(v) => match local_consts.get(v) {
                    Some(LcnfLit::Nat(n)) => SpecConstArg::Nat(*n),
                    Some(LcnfLit::Str(s)) => SpecConstArg::Str(s.clone()),
                    None => SpecConstArg::Unknown,
                },
                _ => SpecConstArg::Unknown,
            })
            .collect()
    }
    /// Maps each call argument to the function it is known to reference.
    pub(super) fn build_closure_args(
        args: &[LcnfArg],
        local_fn_map: &HashMap<LcnfVarId, String>,
    ) -> Vec<SpecClosureArg> {
        args.iter()
            .enumerate()
            .map(|(i, arg)| SpecClosureArg {
                known_fn: match arg {
                    LcnfArg::Var(v) => local_fn_map.get(v).cloned(),
                    _ => None,
                },
                param_idx: i,
            })
            .collect()
    }
}
543#[allow(dead_code)]
545#[derive(Debug, Default)]
546pub struct SpecExtPassRegistry {
547 pub(super) configs: Vec<SpecExtPassConfig>,
548 pub(super) stats: Vec<SpecExtPassStats>,
549}
550impl SpecExtPassRegistry {
551 #[allow(dead_code)]
552 pub fn new() -> Self {
553 Self::default()
554 }
555 #[allow(dead_code)]
556 pub fn register(&mut self, c: SpecExtPassConfig) {
557 self.stats.push(SpecExtPassStats::new());
558 self.configs.push(c);
559 }
560 #[allow(dead_code)]
561 pub fn len(&self) -> usize {
562 self.configs.len()
563 }
564 #[allow(dead_code)]
565 pub fn is_empty(&self) -> bool {
566 self.configs.is_empty()
567 }
568 #[allow(dead_code)]
569 pub fn get(&self, i: usize) -> Option<&SpecExtPassConfig> {
570 self.configs.get(i)
571 }
572 #[allow(dead_code)]
573 pub fn get_stats(&self, i: usize) -> Option<&SpecExtPassStats> {
574 self.stats.get(i)
575 }
576 #[allow(dead_code)]
577 pub fn enabled_passes(&self) -> Vec<&SpecExtPassConfig> {
578 self.configs.iter().filter(|c| c.enabled).collect()
579 }
580 #[allow(dead_code)]
581 pub fn passes_in_phase(&self, ph: &SpecExtPassPhase) -> Vec<&SpecExtPassConfig> {
582 self.configs
583 .iter()
584 .filter(|c| c.enabled && &c.phase == ph)
585 .collect()
586 }
587 #[allow(dead_code)]
588 pub fn total_nodes_visited(&self) -> usize {
589 self.stats.iter().map(|s| s.nodes_visited).sum()
590 }
591 #[allow(dead_code)]
592 pub fn any_changed(&self) -> bool {
593 self.stats.iter().any(|s| s.changed)
594 }
595}
596#[allow(dead_code)]
598#[derive(Debug)]
599pub struct SpecExtCache {
600 pub(super) entries: Vec<(u64, Vec<u8>, bool, u32)>,
601 pub(super) cap: usize,
602 pub(super) total_hits: u64,
603 pub(super) total_misses: u64,
604}
605impl SpecExtCache {
606 #[allow(dead_code)]
607 pub fn new(cap: usize) -> Self {
608 Self {
609 entries: Vec::new(),
610 cap,
611 total_hits: 0,
612 total_misses: 0,
613 }
614 }
615 #[allow(dead_code)]
616 pub fn get(&mut self, key: u64) -> Option<&[u8]> {
617 for e in self.entries.iter_mut() {
618 if e.0 == key && e.2 {
619 e.3 += 1;
620 self.total_hits += 1;
621 return Some(&e.1);
622 }
623 }
624 self.total_misses += 1;
625 None
626 }
627 #[allow(dead_code)]
628 pub fn put(&mut self, key: u64, data: Vec<u8>) {
629 if self.entries.len() >= self.cap {
630 self.entries.retain(|e| e.2);
631 if self.entries.len() >= self.cap {
632 self.entries.remove(0);
633 }
634 }
635 self.entries.push((key, data, true, 0));
636 }
637 #[allow(dead_code)]
638 pub fn invalidate(&mut self) {
639 for e in self.entries.iter_mut() {
640 e.2 = false;
641 }
642 }
643 #[allow(dead_code)]
644 pub fn hit_rate(&self) -> f64 {
645 let t = self.total_hits + self.total_misses;
646 if t == 0 {
647 0.0
648 } else {
649 self.total_hits as f64 / t as f64
650 }
651 }
652 #[allow(dead_code)]
653 pub fn live_count(&self) -> usize {
654 self.entries.iter().filter(|e| e.2).count()
655 }
656}
/// Directed dependency graph with forward and reverse adjacency lists.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct SpecExtDepGraph {
    /// Number of nodes; valid ids are 0..n.
    pub(super) n: usize,
    /// Forward edges: adj[u] lists the successors of u.
    pub(super) adj: Vec<Vec<usize>>,
    /// Reverse edges: rev[v] lists the predecessors of v.
    pub(super) rev: Vec<Vec<usize>>,
    /// Count of distinct edges added.
    pub(super) edge_count: usize,
}
impl SpecExtDepGraph {
    /// Edge-less graph over `n` nodes.
    #[allow(dead_code)]
    pub fn new(n: usize) -> Self {
        Self {
            n,
            adj: vec![Vec::new(); n],
            rev: vec![Vec::new(); n],
            edge_count: 0,
        }
    }
    /// Adds the edge from -> to; out-of-range endpoints and duplicate edges
    /// are silently ignored (duplicate check is a linear scan of adj[from]).
    #[allow(dead_code)]
    pub fn add_edge(&mut self, from: usize, to: usize) {
        if from < self.n && to < self.n {
            if !self.adj[from].contains(&to) {
                self.adj[from].push(to);
                self.rev[to].push(from);
                self.edge_count += 1;
            }
        }
    }
    /// Successors of `n`; empty slice for out-of-range ids.
    #[allow(dead_code)]
    pub fn succs(&self, n: usize) -> &[usize] {
        self.adj.get(n).map(|v| v.as_slice()).unwrap_or(&[])
    }
    /// Predecessors of `n`; empty slice for out-of-range ids.
    #[allow(dead_code)]
    pub fn preds(&self, n: usize) -> &[usize] {
        self.rev.get(n).map(|v| v.as_slice()).unwrap_or(&[])
    }
    /// Topological order via Kahn's algorithm; `None` if the graph is cyclic.
    #[allow(dead_code)]
    pub fn topo_sort(&self) -> Option<Vec<usize>> {
        // In-degree of each node, read off the reverse adjacency lists.
        let mut deg: Vec<usize> = (0..self.n).map(|i| self.rev[i].len()).collect();
        let mut q: std::collections::VecDeque<usize> =
            (0..self.n).filter(|&i| deg[i] == 0).collect();
        let mut out = Vec::with_capacity(self.n);
        while let Some(u) = q.pop_front() {
            out.push(u);
            for &v in &self.adj[u] {
                deg[v] -= 1;
                if deg[v] == 0 {
                    q.push_back(v);
                }
            }
        }
        // Any node left unprocessed sits on a cycle.
        if out.len() == self.n {
            Some(out)
        } else {
            None
        }
    }
    /// True when no topological order exists.
    #[allow(dead_code)]
    pub fn has_cycle(&self) -> bool {
        self.topo_sort().is_none()
    }
    /// Nodes reachable from `start` (inclusive), via iterative DFS.
    /// An out-of-range `start` yields an empty result.
    #[allow(dead_code)]
    pub fn reachable(&self, start: usize) -> Vec<usize> {
        let mut vis = vec![false; self.n];
        let mut stk = vec![start];
        let mut out = Vec::new();
        while let Some(u) = stk.pop() {
            if u < self.n && !vis[u] {
                vis[u] = true;
                out.push(u);
                for &v in &self.adj[u] {
                    if !vis[v] {
                        stk.push(v);
                    }
                }
            }
        }
        out
    }
    /// Strongly connected components via iterative Kosaraju: a DFS postorder
    /// on the forward graph, then DFS on the reverse graph in reverse
    /// postorder; each second-pass tree is one component.
    #[allow(dead_code)]
    pub fn scc(&self) -> Vec<Vec<usize>> {
        let mut visited = vec![false; self.n];
        let mut order = Vec::new();
        for i in 0..self.n {
            if !visited[i] {
                // Explicit (node, next-child-index) stack simulates recursion.
                let mut stk = vec![(i, 0usize)];
                while let Some((u, idx)) = stk.last_mut() {
                    if !visited[*u] {
                        visited[*u] = true;
                    }
                    if *idx < self.adj[*u].len() {
                        let v = self.adj[*u][*idx];
                        *idx += 1;
                        if !visited[v] {
                            stk.push((v, 0));
                        }
                    } else {
                        // All children explored: emit postorder entry.
                        order.push(*u);
                        stk.pop();
                    }
                }
            }
        }
        // comp[u] == usize::MAX means "not yet assigned to a component".
        let mut comp = vec![usize::MAX; self.n];
        let mut components: Vec<Vec<usize>> = Vec::new();
        for &start in order.iter().rev() {
            if comp[start] == usize::MAX {
                let cid = components.len();
                let mut component = Vec::new();
                let mut stk = vec![start];
                while let Some(u) = stk.pop() {
                    if comp[u] == usize::MAX {
                        comp[u] = cid;
                        component.push(u);
                        for &v in &self.rev[u] {
                            if comp[v] == usize::MAX {
                                stk.push(v);
                            }
                        }
                    }
                }
                components.push(component);
            }
        }
        components
    }
    /// Number of nodes.
    #[allow(dead_code)]
    pub fn node_count(&self) -> usize {
        self.n
    }
    /// Number of distinct edges.
    #[allow(dead_code)]
    pub fn edge_count(&self) -> usize {
        self.edge_count
    }
}
793#[allow(dead_code)]
795#[derive(Debug, Clone)]
796pub struct SpecExtDomTree {
797 pub(super) idom: Vec<Option<usize>>,
798 pub(super) children: Vec<Vec<usize>>,
799 pub(super) depth: Vec<usize>,
800}
801impl SpecExtDomTree {
802 #[allow(dead_code)]
803 pub fn new(n: usize) -> Self {
804 Self {
805 idom: vec![None; n],
806 children: vec![Vec::new(); n],
807 depth: vec![0; n],
808 }
809 }
810 #[allow(dead_code)]
811 pub fn set_idom(&mut self, node: usize, dom: usize) {
812 if node < self.idom.len() {
813 self.idom[node] = Some(dom);
814 if dom < self.children.len() {
815 self.children[dom].push(node);
816 }
817 self.depth[node] = if dom < self.depth.len() {
818 self.depth[dom] + 1
819 } else {
820 1
821 };
822 }
823 }
824 #[allow(dead_code)]
825 pub fn dominates(&self, a: usize, mut b: usize) -> bool {
826 if a == b {
827 return true;
828 }
829 let n = self.idom.len();
830 for _ in 0..n {
831 match self.idom.get(b).copied().flatten() {
832 None => return false,
833 Some(p) if p == a => return true,
834 Some(p) if p == b => return false,
835 Some(p) => b = p,
836 }
837 }
838 false
839 }
840 #[allow(dead_code)]
841 pub fn children_of(&self, n: usize) -> &[usize] {
842 self.children.get(n).map(|v| v.as_slice()).unwrap_or(&[])
843 }
844 #[allow(dead_code)]
845 pub fn depth_of(&self, n: usize) -> usize {
846 self.depth.get(n).copied().unwrap_or(0)
847 }
848 #[allow(dead_code)]
849 pub fn lca(&self, mut a: usize, mut b: usize) -> usize {
850 let n = self.idom.len();
851 for _ in 0..(2 * n) {
852 if a == b {
853 return a;
854 }
855 if self.depth_of(a) > self.depth_of(b) {
856 a = self.idom.get(a).and_then(|x| *x).unwrap_or(a);
857 } else {
858 b = self.idom.get(b).and_then(|x| *x).unwrap_or(b);
859 }
860 }
861 0
862 }
863}
/// Lifecycle phase of a specialization pass.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum SpecPassPhase {
    Analysis,
    Transformation,
    Verification,
    Cleanup,
}
impl SpecPassPhase {
    /// Lower-case phase name, suitable for logs and stats keys.
    #[allow(dead_code)]
    pub fn name(&self) -> &str {
        match self {
            Self::Analysis => "analysis",
            Self::Transformation => "transformation",
            Self::Verification => "verification",
            Self::Cleanup => "cleanup",
        }
    }
    /// True for phases that may mutate the module.
    #[allow(dead_code)]
    pub fn is_modifying(&self) -> bool {
        match self {
            Self::Transformation | Self::Cleanup => true,
            Self::Analysis | Self::Verification => false,
        }
    }
}
/// Pipeline position of an extended specialization pass.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SpecExtPassPhase {
    Early,
    Middle,
    Late,
    Finalize,
}
impl SpecExtPassPhase {
    /// All phases in execution order; index equals `order()`.
    const ORDERED: [Self; 4] = [Self::Early, Self::Middle, Self::Late, Self::Finalize];
    /// Is this the Early phase?
    #[allow(dead_code)]
    pub fn is_early(&self) -> bool {
        *self == Self::Early
    }
    /// Is this the Middle phase?
    #[allow(dead_code)]
    pub fn is_middle(&self) -> bool {
        *self == Self::Middle
    }
    /// Is this the Late phase?
    #[allow(dead_code)]
    pub fn is_late(&self) -> bool {
        *self == Self::Late
    }
    /// Is this the Finalize phase?
    #[allow(dead_code)]
    pub fn is_finalize(&self) -> bool {
        *self == Self::Finalize
    }
    /// Zero-based execution rank of this phase.
    #[allow(dead_code)]
    pub fn order(&self) -> u32 {
        match self {
            Self::Early => 0,
            Self::Middle => 1,
            Self::Late => 2,
            Self::Finalize => 3,
        }
    }
    /// Inverse of `order`; `None` for ranks outside 0..=3.
    #[allow(dead_code)]
    pub fn from_order(n: u32) -> Option<Self> {
        Self::ORDERED.get(n as usize).cloned()
    }
}
933#[allow(dead_code)]
935#[derive(Debug, Clone, Default)]
936pub struct SpecExtConstFolder {
937 pub(super) folds: usize,
938 pub(super) failures: usize,
939 pub(super) enabled: bool,
940}
941impl SpecExtConstFolder {
942 #[allow(dead_code)]
943 pub fn new() -> Self {
944 Self {
945 folds: 0,
946 failures: 0,
947 enabled: true,
948 }
949 }
950 #[allow(dead_code)]
951 pub fn add_i64(&mut self, a: i64, b: i64) -> Option<i64> {
952 self.folds += 1;
953 a.checked_add(b)
954 }
955 #[allow(dead_code)]
956 pub fn sub_i64(&mut self, a: i64, b: i64) -> Option<i64> {
957 self.folds += 1;
958 a.checked_sub(b)
959 }
960 #[allow(dead_code)]
961 pub fn mul_i64(&mut self, a: i64, b: i64) -> Option<i64> {
962 self.folds += 1;
963 a.checked_mul(b)
964 }
965 #[allow(dead_code)]
966 pub fn div_i64(&mut self, a: i64, b: i64) -> Option<i64> {
967 if b == 0 {
968 self.failures += 1;
969 None
970 } else {
971 self.folds += 1;
972 a.checked_div(b)
973 }
974 }
975 #[allow(dead_code)]
976 pub fn rem_i64(&mut self, a: i64, b: i64) -> Option<i64> {
977 if b == 0 {
978 self.failures += 1;
979 None
980 } else {
981 self.folds += 1;
982 a.checked_rem(b)
983 }
984 }
985 #[allow(dead_code)]
986 pub fn neg_i64(&mut self, a: i64) -> Option<i64> {
987 self.folds += 1;
988 a.checked_neg()
989 }
990 #[allow(dead_code)]
991 pub fn shl_i64(&mut self, a: i64, s: u32) -> Option<i64> {
992 if s >= 64 {
993 self.failures += 1;
994 None
995 } else {
996 self.folds += 1;
997 a.checked_shl(s)
998 }
999 }
1000 #[allow(dead_code)]
1001 pub fn shr_i64(&mut self, a: i64, s: u32) -> Option<i64> {
1002 if s >= 64 {
1003 self.failures += 1;
1004 None
1005 } else {
1006 self.folds += 1;
1007 a.checked_shr(s)
1008 }
1009 }
1010 #[allow(dead_code)]
1011 pub fn and_i64(&mut self, a: i64, b: i64) -> i64 {
1012 self.folds += 1;
1013 a & b
1014 }
1015 #[allow(dead_code)]
1016 pub fn or_i64(&mut self, a: i64, b: i64) -> i64 {
1017 self.folds += 1;
1018 a | b
1019 }
1020 #[allow(dead_code)]
1021 pub fn xor_i64(&mut self, a: i64, b: i64) -> i64 {
1022 self.folds += 1;
1023 a ^ b
1024 }
1025 #[allow(dead_code)]
1026 pub fn not_i64(&mut self, a: i64) -> i64 {
1027 self.folds += 1;
1028 !a
1029 }
1030 #[allow(dead_code)]
1031 pub fn fold_count(&self) -> usize {
1032 self.folds
1033 }
1034 #[allow(dead_code)]
1035 pub fn failure_count(&self) -> usize {
1036 self.failures
1037 }
1038 #[allow(dead_code)]
1039 pub fn enable(&mut self) {
1040 self.enabled = true;
1041 }
1042 #[allow(dead_code)]
1043 pub fn disable(&mut self) {
1044 self.enabled = false;
1045 }
1046 #[allow(dead_code)]
1047 pub fn is_enabled(&self) -> bool {
1048 self.enabled
1049 }
1050}
/// Aggregate run statistics for one pass.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct SpecPassStats {
    pub total_runs: u32,
    pub successful_runs: u32,
    pub total_changes: u64,
    pub time_ms: u64,
    pub iterations_used: u32,
}
impl SpecPassStats {
    /// All-zero stats.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Folds one run into the totals. Every recorded run is counted as
    /// successful; `iterations` overwrites rather than accumulates.
    #[allow(dead_code)]
    pub fn record_run(&mut self, changes: u64, time_ms: u64, iterations: u32) {
        self.total_runs += 1;
        self.successful_runs += 1;
        self.total_changes += changes;
        self.time_ms += time_ms;
        self.iterations_used = iterations;
    }
    /// Mean changes per recorded run; 0.0 before any runs.
    #[allow(dead_code)]
    pub fn average_changes_per_run(&self) -> f64 {
        match self.total_runs {
            0 => 0.0,
            runs => self.total_changes as f64 / f64::from(runs),
        }
    }
    /// Fraction of runs flagged successful; 0.0 before any runs.
    #[allow(dead_code)]
    pub fn success_rate(&self) -> f64 {
        match self.total_runs {
            0 => 0.0,
            runs => f64::from(self.successful_runs) / f64::from(runs),
        }
    }
    /// One-line human-readable summary.
    #[allow(dead_code)]
    pub fn format_summary(&self) -> String {
        format!(
            "Runs: {}/{}, Changes: {}, Time: {}ms",
            self.successful_runs, self.total_runs, self.total_changes, self.time_ms
        )
    }
}
/// Stateless constant-folding helpers mirroring LCNF arithmetic primitives.
/// All fallible integer ops use checked arithmetic so folding never panics;
/// `None` means "cannot fold" (overflow or division/remainder by zero).
#[allow(dead_code)]
pub struct SpecConstantFoldingHelper;
impl SpecConstantFoldingHelper {
    /// i64 addition; `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_add_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_add(b)
    }
    /// i64 subtraction; `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_sub_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_sub(b)
    }
    /// i64 multiplication; `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_mul_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_mul(b)
    }
    /// i64 division; `checked_div` covers both b == 0 and i64::MIN / -1,
    /// so no separate zero guard is needed.
    #[allow(dead_code)]
    pub fn fold_div_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_div(b)
    }
    /// f64 addition (IEEE-754 semantics; never fails).
    #[allow(dead_code)]
    pub fn fold_add_f64(a: f64, b: f64) -> f64 {
        a + b
    }
    /// f64 multiplication (IEEE-754 semantics; never fails).
    #[allow(dead_code)]
    pub fn fold_mul_f64(a: f64, b: f64) -> f64 {
        a * b
    }
    /// i64 negation; `None` for i64::MIN.
    #[allow(dead_code)]
    pub fn fold_neg_i64(a: i64) -> Option<i64> {
        a.checked_neg()
    }
    /// Boolean NOT.
    #[allow(dead_code)]
    pub fn fold_not_bool(a: bool) -> bool {
        !a
    }
    /// Boolean AND.
    #[allow(dead_code)]
    pub fn fold_and_bool(a: bool, b: bool) -> bool {
        a && b
    }
    /// Boolean OR.
    #[allow(dead_code)]
    pub fn fold_or_bool(a: bool, b: bool) -> bool {
        a || b
    }
    /// Left shift; `None` when b >= 64.
    #[allow(dead_code)]
    pub fn fold_shl_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shl(b)
    }
    /// Arithmetic right shift; `None` when b >= 64.
    #[allow(dead_code)]
    pub fn fold_shr_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shr(b)
    }
    /// i64 remainder. Previously computed raw `a % b`, which overflows
    /// (panicking in debug builds) on i64::MIN % -1; `checked_rem` handles
    /// that case and b == 0 uniformly, matching `fold_div_i64`.
    #[allow(dead_code)]
    pub fn fold_rem_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_rem(b)
    }
    /// Bitwise AND.
    #[allow(dead_code)]
    pub fn fold_bitand_i64(a: i64, b: i64) -> i64 {
        a & b
    }
    /// Bitwise OR.
    #[allow(dead_code)]
    pub fn fold_bitor_i64(a: i64, b: i64) -> i64 {
        a | b
    }
    /// Bitwise XOR.
    #[allow(dead_code)]
    pub fn fold_bitxor_i64(a: i64, b: i64) -> i64 {
        a ^ b
    }
    /// Bitwise NOT.
    #[allow(dead_code)]
    pub fn fold_bitnot_i64(a: i64) -> i64 {
        !a
    }
}
/// What is known about one value argument at a candidate call site.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SpecConstArg {
    /// Argument is this literal natural number.
    Nat(u64),
    /// Argument is this literal string.
    Str(String),
    /// Nothing is known; this position cannot drive specialization.
    Unknown,
}
/// Compact dominator tree keyed by node index.
/// NOTE(review): `dom_children` and `dom_depth` are allocated but never
/// updated by `set_idom` here (unlike `SpecExtDomTree`) — confirm whether a
/// separate builder fills them in.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct SpecDominatorTree {
    /// idom[n] is the immediate dominator of node n, if assigned.
    pub idom: Vec<Option<u32>>,
    /// Children of each node in the dominator tree.
    pub dom_children: Vec<Vec<u32>>,
    /// Depth of each node in the dominator tree.
    pub dom_depth: Vec<u32>,
}
impl SpecDominatorTree {
    /// Tree over `size` nodes with no idoms assigned and all depths zero.
    #[allow(dead_code)]
    pub fn new(size: usize) -> Self {
        SpecDominatorTree {
            idom: vec![None; size],
            dom_children: vec![Vec::new(); size],
            dom_depth: vec![0; size],
        }
    }
    /// Sets the immediate dominator of `node`. Out-of-range nodes are now
    /// ignored instead of panicking, consistent with `SpecExtDomTree::set_idom`.
    #[allow(dead_code)]
    pub fn set_idom(&mut self, node: usize, idom: u32) {
        if let Some(slot) = self.idom.get_mut(node) {
            *slot = Some(idom);
        }
    }
    /// True if `a` dominates `b` (every node dominates itself).
    ///
    /// Walks the idom chain upward from `b`. The walk is bounded by the node
    /// count and uses checked indexing, so malformed data (idom cycles of
    /// length >= 2, or out-of-range parent ids) terminates with `false`
    /// instead of looping forever or panicking, as the previous unbounded
    /// `loop` with direct indexing could.
    #[allow(dead_code)]
    pub fn dominates(&self, a: usize, b: usize) -> bool {
        if a == b {
            return true;
        }
        let mut cur = b;
        // A valid idom chain is shorter than the node count; anything longer
        // means cyclic/corrupt data, so bail out with `false`.
        for _ in 0..self.idom.len() {
            match self.idom.get(cur).copied().flatten() {
                Some(parent) if parent as usize == a => return true,
                Some(parent) if parent as usize == cur => return false,
                Some(parent) => cur = parent as usize,
                None => return false,
            }
        }
        false
    }
    /// Depth of `node` in the dominator tree; 0 for unknown nodes.
    #[allow(dead_code)]
    pub fn depth(&self, node: usize) -> u32 {
        self.dom_depth.get(node).copied().unwrap_or(0)
    }
}
1225#[allow(dead_code)]
1226#[derive(Debug, Clone)]
1227pub struct SpecPassConfig {
1228 pub phase: SpecPassPhase,
1229 pub enabled: bool,
1230 pub max_iterations: u32,
1231 pub debug_output: bool,
1232 pub pass_name: String,
1233}
1234impl SpecPassConfig {
1235 #[allow(dead_code)]
1236 pub fn new(name: impl Into<String>, phase: SpecPassPhase) -> Self {
1237 SpecPassConfig {
1238 phase,
1239 enabled: true,
1240 max_iterations: 10,
1241 debug_output: false,
1242 pass_name: name.into(),
1243 }
1244 }
1245 #[allow(dead_code)]
1246 pub fn disabled(mut self) -> Self {
1247 self.enabled = false;
1248 self
1249 }
1250 #[allow(dead_code)]
1251 pub fn with_debug(mut self) -> Self {
1252 self.debug_output = true;
1253 self
1254 }
1255 #[allow(dead_code)]
1256 pub fn max_iter(mut self, n: u32) -> Self {
1257 self.max_iterations = n;
1258 self
1259 }
1260}
/// Per-basic-block liveness sets (live-in/live-out plus def/use seeds).
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct SpecLivenessInfo {
    pub live_in: Vec<std::collections::HashSet<u32>>,
    pub live_out: Vec<std::collections::HashSet<u32>>,
    pub defs: Vec<std::collections::HashSet<u32>>,
    pub uses: Vec<std::collections::HashSet<u32>>,
}
impl SpecLivenessInfo {
    /// Allocates empty in/out/def/use sets for `block_count` blocks.
    #[allow(dead_code)]
    pub fn new(block_count: usize) -> Self {
        let fresh = || vec![std::collections::HashSet::new(); block_count];
        SpecLivenessInfo {
            live_in: fresh(),
            live_out: fresh(),
            defs: fresh(),
            uses: fresh(),
        }
    }
    /// Marks `var` as defined in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_def(&mut self, block: usize, var: u32) {
        if let Some(set) = self.defs.get_mut(block) {
            set.insert(var);
        }
    }
    /// Marks `var` as used in `block`; out-of-range blocks are ignored.
    #[allow(dead_code)]
    pub fn add_use(&mut self, block: usize, var: u32) {
        if let Some(set) = self.uses.get_mut(block) {
            set.insert(var);
        }
    }
    /// Is `var` live on entry to `block`? False for unknown blocks.
    #[allow(dead_code)]
    pub fn is_live_in(&self, block: usize, var: u32) -> bool {
        self.live_in
            .get(block)
            .map_or(false, |s| s.contains(&var))
    }
    /// Is `var` live on exit from `block`? False for unknown blocks.
    #[allow(dead_code)]
    pub fn is_live_out(&self, block: usize, var: u32) -> bool {
        self.live_out
            .get(block)
            .map_or(false, |s| s.contains(&var))
    }
}
/// Configuration for one extended specialization pass.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct SpecExtPassConfig {
    /// Pass name, used for diagnostics.
    pub name: String,
    /// Pipeline phase the pass runs in (defaults to `Middle` in `new`).
    pub phase: SpecExtPassPhase,
    /// Whether the pass runs at all.
    pub enabled: bool,
    /// Upper bound on fixpoint iterations (defaults to 100 in `new`).
    pub max_iterations: usize,
    /// Debug verbosity; 0 disables debug output (see `is_debug_enabled`).
    pub debug: u32,
    /// Optional per-pass time budget in milliseconds; `None` means no limit.
    pub timeout_ms: Option<u64>,
}
1317impl SpecExtPassConfig {
1318 #[allow(dead_code)]
1319 pub fn new(name: impl Into<String>) -> Self {
1320 Self {
1321 name: name.into(),
1322 phase: SpecExtPassPhase::Middle,
1323 enabled: true,
1324 max_iterations: 100,
1325 debug: 0,
1326 timeout_ms: None,
1327 }
1328 }
1329 #[allow(dead_code)]
1330 pub fn with_phase(mut self, phase: SpecExtPassPhase) -> Self {
1331 self.phase = phase;
1332 self
1333 }
1334 #[allow(dead_code)]
1335 pub fn with_max_iter(mut self, n: usize) -> Self {
1336 self.max_iterations = n;
1337 self
1338 }
1339 #[allow(dead_code)]
1340 pub fn with_debug(mut self, d: u32) -> Self {
1341 self.debug = d;
1342 self
1343 }
1344 #[allow(dead_code)]
1345 pub fn disabled(mut self) -> Self {
1346 self.enabled = false;
1347 self
1348 }
1349 #[allow(dead_code)]
1350 pub fn with_timeout(mut self, ms: u64) -> Self {
1351 self.timeout_ms = Some(ms);
1352 self
1353 }
1354 #[allow(dead_code)]
1355 pub fn is_debug_enabled(&self) -> bool {
1356 self.debug > 0
1357 }
1358}
1359#[allow(dead_code)]
1360#[derive(Debug, Clone)]
1361pub struct SpecWorklist {
1362 pub(super) items: std::collections::VecDeque<u32>,
1363 pub(super) in_worklist: std::collections::HashSet<u32>,
1364}
1365impl SpecWorklist {
1366 #[allow(dead_code)]
1367 pub fn new() -> Self {
1368 SpecWorklist {
1369 items: std::collections::VecDeque::new(),
1370 in_worklist: std::collections::HashSet::new(),
1371 }
1372 }
1373 #[allow(dead_code)]
1374 pub fn push(&mut self, item: u32) -> bool {
1375 if self.in_worklist.insert(item) {
1376 self.items.push_back(item);
1377 true
1378 } else {
1379 false
1380 }
1381 }
1382 #[allow(dead_code)]
1383 pub fn pop(&mut self) -> Option<u32> {
1384 let item = self.items.pop_front()?;
1385 self.in_worklist.remove(&item);
1386 Some(item)
1387 }
1388 #[allow(dead_code)]
1389 pub fn is_empty(&self) -> bool {
1390 self.items.is_empty()
1391 }
1392 #[allow(dead_code)]
1393 pub fn len(&self) -> usize {
1394 self.items.len()
1395 }
1396 #[allow(dead_code)]
1397 pub fn contains(&self, item: u32) -> bool {
1398 self.in_worklist.contains(&item)
1399 }
1400}
1401#[allow(dead_code)]
1403#[derive(Debug, Clone)]
1404pub struct SpecExtWorklist {
1405 pub(super) items: std::collections::VecDeque<usize>,
1406 pub(super) present: Vec<bool>,
1407}
1408impl SpecExtWorklist {
1409 #[allow(dead_code)]
1410 pub fn new(capacity: usize) -> Self {
1411 Self {
1412 items: std::collections::VecDeque::new(),
1413 present: vec![false; capacity],
1414 }
1415 }
1416 #[allow(dead_code)]
1417 pub fn push(&mut self, id: usize) {
1418 if id < self.present.len() && !self.present[id] {
1419 self.present[id] = true;
1420 self.items.push_back(id);
1421 }
1422 }
1423 #[allow(dead_code)]
1424 pub fn push_front(&mut self, id: usize) {
1425 if id < self.present.len() && !self.present[id] {
1426 self.present[id] = true;
1427 self.items.push_front(id);
1428 }
1429 }
1430 #[allow(dead_code)]
1431 pub fn pop(&mut self) -> Option<usize> {
1432 let id = self.items.pop_front()?;
1433 if id < self.present.len() {
1434 self.present[id] = false;
1435 }
1436 Some(id)
1437 }
1438 #[allow(dead_code)]
1439 pub fn is_empty(&self) -> bool {
1440 self.items.is_empty()
1441 }
1442 #[allow(dead_code)]
1443 pub fn len(&self) -> usize {
1444 self.items.len()
1445 }
1446 #[allow(dead_code)]
1447 pub fn contains(&self, id: usize) -> bool {
1448 id < self.present.len() && self.present[id]
1449 }
1450 #[allow(dead_code)]
1451 pub fn drain_all(&mut self) -> Vec<usize> {
1452 let v: Vec<usize> = self.items.drain(..).collect();
1453 for &id in &v {
1454 if id < self.present.len() {
1455 self.present[id] = false;
1456 }
1457 }
1458 v
1459 }
1460}
/// Cache of already-created specializations, keyed by `SpecializationKey`,
/// so repeated requests for the same specialization share one copy.
#[derive(Debug, Clone, Default)]
pub struct SpecializationCache {
    /// Maps each key to the name of its specialized function.
    pub(super) entries: HashMap<SpecializationKey, String>,
    /// Cumulative code-size growth accrued by all cached specializations.
    pub(super) total_growth: usize,
}
1469impl SpecializationCache {
1470 pub(super) fn new() -> Self {
1471 SpecializationCache {
1472 entries: HashMap::new(),
1473 total_growth: 0,
1474 }
1475 }
1476 pub(super) fn lookup(&self, key: &SpecializationKey) -> Option<&str> {
1477 self.entries.get(key).map(|s| s.as_str())
1478 }
1479 pub(super) fn insert(&mut self, key: SpecializationKey, name: String, growth: usize) {
1480 self.entries.insert(key, name);
1481 self.total_growth += growth;
1482 }
1483 pub(super) fn specialization_count(&self, original: &str) -> usize {
1484 self.entries
1485 .keys()
1486 .filter(|k| k.original == original)
1487 .count()
1488 }
1489}
/// Global code-size budget for specialization: limits how much the program
/// may grow relative to its original total size.
#[derive(Debug, Clone)]
pub struct SizeBudget {
    /// Total size of all functions before specialization.
    pub(super) original_total: usize,
    /// Hard cap: original size scaled by the configured growth factor.
    pub(super) max_total: usize,
    /// Growth spent so far.
    pub(super) current_growth: usize,
}
1497impl SizeBudget {
1498 pub(super) fn new(original_total: usize, growth_factor: f64) -> Self {
1499 SizeBudget {
1500 original_total,
1501 max_total: (original_total as f64 * growth_factor) as usize,
1502 current_growth: 0,
1503 }
1504 }
1505 pub(super) fn can_afford(&self, additional: usize) -> bool {
1506 self.original_total + self.current_growth + additional <= self.max_total
1507 }
1508 pub(super) fn spend(&mut self, amount: usize) {
1509 self.current_growth += amount;
1510 }
1511 pub(super) fn remaining(&self) -> usize {
1512 self.max_total
1513 .saturating_sub(self.original_total + self.current_growth)
1514 }
1515}
/// Classification of a type argument at a call site for specialization.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SpecTypeArg {
    /// A known concrete type worth specializing on.
    Concrete(LcnfType),
    /// Polymorphic/unknown type; contributes nothing to the specialization
    /// key (skipped by `SpecializationKey::mangled_name`/`is_trivial`).
    Poly,
}
/// Directed dependency graph between specialization candidates, stored as a
/// node list plus an edge list of `(dependency, dependent)` pairs.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct SpecDepGraph {
    /// Unique node ids, in insertion order.
    pub(super) nodes: Vec<u32>,
    /// Edges `(dep, dependent)`: the second element depends on the first.
    pub(super) edges: Vec<(u32, u32)>,
}
1530impl SpecDepGraph {
1531 #[allow(dead_code)]
1532 pub fn new() -> Self {
1533 SpecDepGraph {
1534 nodes: Vec::new(),
1535 edges: Vec::new(),
1536 }
1537 }
1538 #[allow(dead_code)]
1539 pub fn add_node(&mut self, id: u32) {
1540 if !self.nodes.contains(&id) {
1541 self.nodes.push(id);
1542 }
1543 }
1544 #[allow(dead_code)]
1545 pub fn add_dep(&mut self, dep: u32, dependent: u32) {
1546 self.add_node(dep);
1547 self.add_node(dependent);
1548 self.edges.push((dep, dependent));
1549 }
1550 #[allow(dead_code)]
1551 pub fn dependents_of(&self, node: u32) -> Vec<u32> {
1552 self.edges
1553 .iter()
1554 .filter(|(d, _)| *d == node)
1555 .map(|(_, dep)| *dep)
1556 .collect()
1557 }
1558 #[allow(dead_code)]
1559 pub fn dependencies_of(&self, node: u32) -> Vec<u32> {
1560 self.edges
1561 .iter()
1562 .filter(|(_, dep)| *dep == node)
1563 .map(|(d, _)| *d)
1564 .collect()
1565 }
1566 #[allow(dead_code)]
1567 pub fn topological_sort(&self) -> Vec<u32> {
1568 let mut in_degree: std::collections::HashMap<u32, u32> = std::collections::HashMap::new();
1569 for &n in &self.nodes {
1570 in_degree.insert(n, 0);
1571 }
1572 for (_, dep) in &self.edges {
1573 *in_degree.entry(*dep).or_insert(0) += 1;
1574 }
1575 let mut queue: std::collections::VecDeque<u32> = self
1576 .nodes
1577 .iter()
1578 .filter(|&&n| in_degree[&n] == 0)
1579 .copied()
1580 .collect();
1581 let mut result = Vec::new();
1582 while let Some(node) = queue.pop_front() {
1583 result.push(node);
1584 for dep in self.dependents_of(node) {
1585 let cnt = in_degree.entry(dep).or_insert(0);
1586 *cnt = cnt.saturating_sub(1);
1587 if *cnt == 0 {
1588 queue.push_back(dep);
1589 }
1590 }
1591 }
1592 result
1593 }
1594 #[allow(dead_code)]
1595 pub fn has_cycle(&self) -> bool {
1596 self.topological_sort().len() < self.nodes.len()
1597 }
1598}
/// Recognizes calls to numeric (`Nat.*`) primitives and rewrites types for
/// the Nat-to-u64 specialization.
pub struct NumericSpecializer {
    /// Fully-qualified names of the numeric primitives to recognize.
    pub(super) numeric_ops: HashSet<String>,
}
1604impl NumericSpecializer {
1605 pub(super) fn new() -> Self {
1606 let mut ops = HashSet::new();
1607 ops.insert("Nat.add".to_string());
1608 ops.insert("Nat.sub".to_string());
1609 ops.insert("Nat.mul".to_string());
1610 ops.insert("Nat.div".to_string());
1611 ops.insert("Nat.mod".to_string());
1612 ops.insert("Nat.beq".to_string());
1613 ops.insert("Nat.ble".to_string());
1614 ops.insert("Nat.blt".to_string());
1615 ops.insert("Nat.decEq".to_string());
1616 ops.insert("Nat.zero".to_string());
1617 ops.insert("Nat.succ".to_string());
1618 NumericSpecializer { numeric_ops: ops }
1619 }
1620 pub(super) fn is_numeric_op(&self, name: &str) -> bool {
1621 self.numeric_ops.contains(name)
1622 }
1623 pub(super) fn specialize_nat_to_u64(&self, ty: &LcnfType) -> LcnfType {
1624 match ty {
1625 LcnfType::Nat => LcnfType::Nat,
1626 LcnfType::Fun(params, ret) => {
1627 let new_params: Vec<LcnfType> = params
1628 .iter()
1629 .map(|p| self.specialize_nat_to_u64(p))
1630 .collect();
1631 let new_ret = self.specialize_nat_to_u64(ret);
1632 LcnfType::Fun(new_params, Box::new(new_ret))
1633 }
1634 LcnfType::Ctor(name, args) => {
1635 let new_args: Vec<LcnfType> =
1636 args.iter().map(|a| self.specialize_nat_to_u64(a)).collect();
1637 LcnfType::Ctor(name.clone(), new_args)
1638 }
1639 other => other.clone(),
1640 }
1641 }
1642}
/// Identifies one specialization: the original function plus the
/// per-argument information it was specialized on. Call sites producing
/// equal keys share a single specialized copy (see `SpecializationCache`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SpecializationKey {
    /// Name of the unspecialized function.
    pub original: String,
    /// Per-type-argument classification (concrete vs. polymorphic).
    pub type_args: Vec<SpecTypeArg>,
    /// Per-value-argument constant classification.
    pub const_args: Vec<SpecConstArg>,
    /// Per-closure-argument information.
    pub closure_args: Vec<SpecClosureArg>,
}
1655impl SpecializationKey {
1656 pub(super) fn is_trivial(&self) -> bool {
1657 self.type_args
1658 .iter()
1659 .all(|a| matches!(a, SpecTypeArg::Poly))
1660 && self
1661 .const_args
1662 .iter()
1663 .all(|a| matches!(a, SpecConstArg::Unknown))
1664 && self.closure_args.iter().all(|a| a.known_fn.is_none())
1665 }
1666 pub(super) fn mangled_name(&self) -> String {
1667 let mut name = self.original.clone();
1668 for (i, ta) in self.type_args.iter().enumerate() {
1669 if let SpecTypeArg::Concrete(ty) = ta {
1670 name.push_str(&format!("_T{}_{}", i, type_suffix(ty)));
1671 }
1672 }
1673 for (i, ca) in self.const_args.iter().enumerate() {
1674 match ca {
1675 SpecConstArg::Nat(n) => name.push_str(&format!("_C{}_N{}", i, n)),
1676 SpecConstArg::Str(s) => {
1677 let short = if s.len() > 8 { &s[..8] } else { s.as_str() };
1678 name.push_str(&format!("_C{}_S{}", i, short));
1679 }
1680 SpecConstArg::Unknown => {}
1681 }
1682 }
1683 for ca in &self.closure_args {
1684 if let Some(fn_name) = &ca.known_fn {
1685 name.push_str(&format!("_F{}", fn_name));
1686 }
1687 }
1688 name
1689 }
1690}
/// Per-block liveness info with small sets stored as plain vectors
/// (deduplicated on insert), indexed by block id.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct SpecExtLiveness {
    /// Variables live on entry to each block.
    pub live_in: Vec<Vec<usize>>,
    /// Variables live on exit from each block.
    pub live_out: Vec<Vec<usize>>,
    /// Variables defined in each block.
    pub defs: Vec<Vec<usize>>,
    /// Variables used in each block.
    pub uses: Vec<Vec<usize>>,
}
1700impl SpecExtLiveness {
1701 #[allow(dead_code)]
1702 pub fn new(n: usize) -> Self {
1703 Self {
1704 live_in: vec![Vec::new(); n],
1705 live_out: vec![Vec::new(); n],
1706 defs: vec![Vec::new(); n],
1707 uses: vec![Vec::new(); n],
1708 }
1709 }
1710 #[allow(dead_code)]
1711 pub fn live_in(&self, b: usize, v: usize) -> bool {
1712 self.live_in.get(b).map(|s| s.contains(&v)).unwrap_or(false)
1713 }
1714 #[allow(dead_code)]
1715 pub fn live_out(&self, b: usize, v: usize) -> bool {
1716 self.live_out
1717 .get(b)
1718 .map(|s| s.contains(&v))
1719 .unwrap_or(false)
1720 }
1721 #[allow(dead_code)]
1722 pub fn add_def(&mut self, b: usize, v: usize) {
1723 if let Some(s) = self.defs.get_mut(b) {
1724 if !s.contains(&v) {
1725 s.push(v);
1726 }
1727 }
1728 }
1729 #[allow(dead_code)]
1730 pub fn add_use(&mut self, b: usize, v: usize) {
1731 if let Some(s) = self.uses.get_mut(b) {
1732 if !s.contains(&v) {
1733 s.push(v);
1734 }
1735 }
1736 }
1737 #[allow(dead_code)]
1738 pub fn var_is_used_in_block(&self, b: usize, v: usize) -> bool {
1739 self.uses.get(b).map(|s| s.contains(&v)).unwrap_or(false)
1740 }
1741 #[allow(dead_code)]
1742 pub fn var_is_def_in_block(&self, b: usize, v: usize) -> bool {
1743 self.defs.get(b).map(|s| s.contains(&v)).unwrap_or(false)
1744 }
1745}
/// Accumulated statistics for one extended specialization pass run.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct SpecExtPassStats {
    /// Fixpoint iterations executed.
    pub iterations: usize,
    /// Whether the pass modified anything (set by `modify`).
    pub changed: bool,
    /// Nodes inspected.
    pub nodes_visited: usize,
    /// Nodes actually rewritten.
    pub nodes_modified: usize,
    /// Time spent, in milliseconds.
    pub time_ms: u64,
    /// Memory use in bytes; `merge` keeps the maximum across runs.
    pub memory_bytes: usize,
    /// Errors encountered.
    pub errors: usize,
}
1758impl SpecExtPassStats {
1759 #[allow(dead_code)]
1760 pub fn new() -> Self {
1761 Self::default()
1762 }
1763 #[allow(dead_code)]
1764 pub fn visit(&mut self) {
1765 self.nodes_visited += 1;
1766 }
1767 #[allow(dead_code)]
1768 pub fn modify(&mut self) {
1769 self.nodes_modified += 1;
1770 self.changed = true;
1771 }
1772 #[allow(dead_code)]
1773 pub fn iterate(&mut self) {
1774 self.iterations += 1;
1775 }
1776 #[allow(dead_code)]
1777 pub fn error(&mut self) {
1778 self.errors += 1;
1779 }
1780 #[allow(dead_code)]
1781 pub fn efficiency(&self) -> f64 {
1782 if self.nodes_visited == 0 {
1783 0.0
1784 } else {
1785 self.nodes_modified as f64 / self.nodes_visited as f64
1786 }
1787 }
1788 #[allow(dead_code)]
1789 pub fn merge(&mut self, o: &SpecExtPassStats) {
1790 self.iterations += o.iterations;
1791 self.changed |= o.changed;
1792 self.nodes_visited += o.nodes_visited;
1793 self.nodes_modified += o.nodes_modified;
1794 self.time_ms += o.time_ms;
1795 self.memory_bytes = self.memory_bytes.max(o.memory_bytes);
1796 self.errors += o.errors;
1797 }
1798}
/// A call site that is a candidate for redirection to a specialized function.
#[derive(Debug, Clone)]
pub struct SpecCallSite {
    /// Name of the function being called.
    pub(super) callee: String,
    /// Index of the call within the caller — presumably an instruction or
    /// statement index; confirm against the code that builds these.
    pub(super) call_idx: usize,
    /// Type arguments observed at the call (concrete or polymorphic).
    pub(super) type_args: Vec<SpecTypeArg>,
    /// Constant-argument classification observed at the call.
    pub(super) const_args: Vec<SpecConstArg>,
    /// Closure-argument information observed at the call.
    pub(super) closure_args: Vec<SpecClosureArg>,
    /// Variable holding the callee, when known — presumably used for
    /// indirect calls; confirm with the call-site analysis.
    pub(super) callee_var: Option<LcnfVarId>,
}
1815#[allow(dead_code)]
1816#[derive(Debug, Clone)]
1817pub struct SpecAnalysisCache {
1818 pub(super) entries: std::collections::HashMap<String, SpecCacheEntry>,
1819 pub(super) max_size: usize,
1820 pub(super) hits: u64,
1821 pub(super) misses: u64,
1822}
1823impl SpecAnalysisCache {
1824 #[allow(dead_code)]
1825 pub fn new(max_size: usize) -> Self {
1826 SpecAnalysisCache {
1827 entries: std::collections::HashMap::new(),
1828 max_size,
1829 hits: 0,
1830 misses: 0,
1831 }
1832 }
1833 #[allow(dead_code)]
1834 pub fn get(&mut self, key: &str) -> Option<&SpecCacheEntry> {
1835 if self.entries.contains_key(key) {
1836 self.hits += 1;
1837 self.entries.get(key)
1838 } else {
1839 self.misses += 1;
1840 None
1841 }
1842 }
1843 #[allow(dead_code)]
1844 pub fn insert(&mut self, key: String, data: Vec<u8>) {
1845 if self.entries.len() >= self.max_size {
1846 if let Some(oldest) = self.entries.keys().next().cloned() {
1847 self.entries.remove(&oldest);
1848 }
1849 }
1850 self.entries.insert(
1851 key.clone(),
1852 SpecCacheEntry {
1853 key,
1854 data,
1855 timestamp: 0,
1856 valid: true,
1857 },
1858 );
1859 }
1860 #[allow(dead_code)]
1861 pub fn invalidate(&mut self, key: &str) {
1862 if let Some(entry) = self.entries.get_mut(key) {
1863 entry.valid = false;
1864 }
1865 }
1866 #[allow(dead_code)]
1867 pub fn clear(&mut self) {
1868 self.entries.clear();
1869 }
1870 #[allow(dead_code)]
1871 pub fn hit_rate(&self) -> f64 {
1872 let total = self.hits + self.misses;
1873 if total == 0 {
1874 return 0.0;
1875 }
1876 self.hits as f64 / total as f64
1877 }
1878 #[allow(dead_code)]
1879 pub fn size(&self) -> usize {
1880 self.entries.len()
1881 }
1882}