1use crate::lcnf::*;
6
7use std::collections::{HashMap, HashSet, VecDeque};
8
/// Per-basic-block liveness sets for LLVM-level analysis.
///
/// Each `Vec` is indexed by block id; each entry holds the `u32` variable
/// ids in that category. Mutators silently ignore out-of-range block
/// indices, and queries treat unknown blocks as "not live".
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct LLVMLivenessInfo {
    pub live_in: Vec<HashSet<u32>>,
    pub live_out: Vec<HashSet<u32>>,
    pub defs: Vec<HashSet<u32>>,
    pub uses: Vec<HashSet<u32>>,
}
impl LLVMLivenessInfo {
    /// Creates empty liveness sets for `block_count` blocks.
    #[allow(dead_code)]
    pub fn new(block_count: usize) -> Self {
        let empty = || vec![HashSet::new(); block_count];
        LLVMLivenessInfo {
            live_in: empty(),
            live_out: empty(),
            defs: empty(),
            uses: empty(),
        }
    }
    /// Records that `var` is defined in `block`; no-op when `block` is out of range.
    #[allow(dead_code)]
    pub fn add_def(&mut self, block: usize, var: u32) {
        if let Some(set) = self.defs.get_mut(block) {
            set.insert(var);
        }
    }
    /// Records that `var` is used in `block`; no-op when `block` is out of range.
    #[allow(dead_code)]
    pub fn add_use(&mut self, block: usize, var: u32) {
        if let Some(set) = self.uses.get_mut(block) {
            set.insert(var);
        }
    }
    /// True when `var` is live on entry to `block` (false for unknown blocks).
    #[allow(dead_code)]
    pub fn is_live_in(&self, block: usize, var: u32) -> bool {
        self.live_in.get(block).map_or(false, |s| s.contains(&var))
    }
    /// True when `var` is live on exit from `block` (false for unknown blocks).
    #[allow(dead_code)]
    pub fn is_live_out(&self, block: usize, var: u32) -> bool {
        self.live_out.get(block).map_or(false, |s| s.contains(&var))
    }
}
/// A single LLVM IR function definition or declaration.
#[derive(Debug, Clone, PartialEq)]
pub struct LlvmFunc {
    /// Symbol name (mangled by `LlvmBackend::mangle_name` when produced
    /// by `compile_decl`).
    pub name: String,
    /// Return type.
    pub ret_ty: LlvmType,
    /// Parameters as (type, name) pairs.
    pub params: Vec<(LlvmType, String)>,
    /// Instruction sequence forming the body.
    pub body: Vec<LlvmInstr>,
    /// Linkage emitted for the symbol.
    pub linkage: LlvmLinkage,
    /// Function attributes (e.g. `NoUnwind`).
    pub attrs: Vec<LlvmAttr>,
    /// True for a bodiless `declare` rather than a `define`.
    pub is_declare: bool,
}
/// Symbol linkage kinds, named after LLVM's linkage keywords
/// (`private`, `internal`, `external`, `linkonce`, `weak`, `common`,
/// `appending`, `extern_weak`, `linkonce_odr`, `weak_odr`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LlvmLinkage {
    Private,
    Internal,
    External,
    LinkOnce,
    Weak,
    Common,
    Appending,
    ExternWeak,
    LinkOnceOdr,
    WeakOdr,
}
/// Floating-point comparison predicates (for `fcmp`-style instructions).
/// `O*` variants are LLVM's "ordered" predicates; `Uno`/`Ord` test
/// whether the operands themselves are unordered/ordered. Trailing
/// underscores on `True_`/`False_` avoid clashing with Rust literals.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FcmpPred {
    Oeq,
    One,
    Olt,
    Ogt,
    Ole,
    Oge,
    Uno,
    Ord,
    True_,
    False_,
}
/// LLVM IR types used by this backend.
#[derive(Debug, Clone, PartialEq)]
pub enum LlvmType {
    Void,
    I1,
    I8,
    I16,
    I32,
    I64,
    I128,
    F32,
    F64,
    F128,
    /// Opaque pointer type.
    Ptr,
    /// Fixed-size array: element count and element type.
    Array(u64, Box<LlvmType>),
    /// Anonymous struct with the given field types.
    Struct(Vec<LlvmType>),
    /// SIMD vector: lane count and lane type.
    Vector(u32, Box<LlvmType>),
    /// Function type: return type, parameter types, and whether the
    /// function is variadic.
    FuncType {
        ret: Box<LlvmType>,
        params: Vec<LlvmType>,
        variadic: bool,
    },
    /// Reference to a named (aliased) type.
    Named(String),
}
160#[allow(dead_code)]
162#[derive(Debug, Default)]
163pub struct LLVMExtPassRegistry {
164 pub(super) configs: Vec<LLVMExtPassConfig>,
165 pub(super) stats: Vec<LLVMExtPassStats>,
166}
167impl LLVMExtPassRegistry {
168 #[allow(dead_code)]
169 pub fn new() -> Self {
170 Self::default()
171 }
172 #[allow(dead_code)]
173 pub fn register(&mut self, c: LLVMExtPassConfig) {
174 self.stats.push(LLVMExtPassStats::new());
175 self.configs.push(c);
176 }
177 #[allow(dead_code)]
178 pub fn len(&self) -> usize {
179 self.configs.len()
180 }
181 #[allow(dead_code)]
182 pub fn is_empty(&self) -> bool {
183 self.configs.is_empty()
184 }
185 #[allow(dead_code)]
186 pub fn get(&self, i: usize) -> Option<&LLVMExtPassConfig> {
187 self.configs.get(i)
188 }
189 #[allow(dead_code)]
190 pub fn get_stats(&self, i: usize) -> Option<&LLVMExtPassStats> {
191 self.stats.get(i)
192 }
193 #[allow(dead_code)]
194 pub fn enabled_passes(&self) -> Vec<&LLVMExtPassConfig> {
195 self.configs.iter().filter(|c| c.enabled).collect()
196 }
197 #[allow(dead_code)]
198 pub fn passes_in_phase(&self, ph: &LLVMExtPassPhase) -> Vec<&LLVMExtPassConfig> {
199 self.configs
200 .iter()
201 .filter(|c| c.enabled && &c.phase == ph)
202 .collect()
203 }
204 #[allow(dead_code)]
205 pub fn total_nodes_visited(&self) -> usize {
206 self.stats.iter().map(|s| s.nodes_visited).sum()
207 }
208 #[allow(dead_code)]
209 pub fn any_changed(&self) -> bool {
210 self.stats.iter().any(|s| s.changed)
211 }
212}
/// Liveness sets stored as plain `Vec<usize>` per block (insertion
/// order preserved, duplicates rejected on insert).
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct LLVMExtLiveness {
    pub live_in: Vec<Vec<usize>>,
    pub live_out: Vec<Vec<usize>>,
    pub defs: Vec<Vec<usize>>,
    pub uses: Vec<Vec<usize>>,
}
impl LLVMExtLiveness {
    /// Creates empty sets for `n` blocks.
    #[allow(dead_code)]
    pub fn new(n: usize) -> Self {
        Self {
            live_in: vec![Vec::new(); n],
            live_out: vec![Vec::new(); n],
            defs: vec![Vec::new(); n],
            uses: vec![Vec::new(); n],
        }
    }
    /// Membership test shared by all queries; unknown blocks report false.
    #[allow(dead_code)]
    fn member(table: &[Vec<usize>], b: usize, v: usize) -> bool {
        table.get(b).map_or(false, |set| set.contains(&v))
    }
    /// Inserts `v` into `table[b]` unless already present or out of range.
    #[allow(dead_code)]
    fn push_unique(table: &mut [Vec<usize>], b: usize, v: usize) {
        if let Some(set) = table.get_mut(b) {
            if !set.contains(&v) {
                set.push(v);
            }
        }
    }
    /// True when `v` is live on entry to block `b`.
    #[allow(dead_code)]
    pub fn live_in(&self, b: usize, v: usize) -> bool {
        Self::member(&self.live_in, b, v)
    }
    /// True when `v` is live on exit from block `b`.
    #[allow(dead_code)]
    pub fn live_out(&self, b: usize, v: usize) -> bool {
        Self::member(&self.live_out, b, v)
    }
    /// Records a definition of `v` in block `b`.
    #[allow(dead_code)]
    pub fn add_def(&mut self, b: usize, v: usize) {
        Self::push_unique(&mut self.defs, b, v);
    }
    /// Records a use of `v` in block `b`.
    #[allow(dead_code)]
    pub fn add_use(&mut self, b: usize, v: usize) {
        Self::push_unique(&mut self.uses, b, v);
    }
    /// True when `v` was recorded as used in block `b`.
    #[allow(dead_code)]
    pub fn var_is_used_in_block(&self, b: usize, v: usize) -> bool {
        Self::member(&self.uses, b, v)
    }
    /// True when `v` was recorded as defined in block `b`.
    #[allow(dead_code)]
    pub fn var_is_def_in_block(&self, b: usize, v: usize) -> bool {
        Self::member(&self.defs, b, v)
    }
}
/// Aggregate statistics for one optimization pass.
///
/// `record_run` counts every recorded run as successful, so
/// `success_rate` is 1.0 whenever any run was recorded; failed runs are
/// never recorded through this API.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct LLVMPassStats {
    pub total_runs: u32,
    pub successful_runs: u32,
    pub total_changes: u64,
    pub time_ms: u64,
    pub iterations_used: u32,
}
impl LLVMPassStats {
    /// Creates zeroed statistics.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Accumulates one run: bumps both run counters, adds `changes` and
    /// `time_ms`, and remembers the latest iteration count.
    #[allow(dead_code)]
    pub fn record_run(&mut self, changes: u64, time_ms: u64, iterations: u32) {
        self.total_runs += 1;
        self.successful_runs += 1;
        self.total_changes += changes;
        self.time_ms += time_ms;
        self.iterations_used = iterations;
    }
    /// Mean changes per run; 0.0 before any run.
    #[allow(dead_code)]
    pub fn average_changes_per_run(&self) -> f64 {
        match self.total_runs {
            0 => 0.0,
            runs => self.total_changes as f64 / runs as f64,
        }
    }
    /// Fraction of runs recorded as successful; 0.0 before any run.
    #[allow(dead_code)]
    pub fn success_rate(&self) -> f64 {
        match self.total_runs {
            0 => 0.0,
            runs => self.successful_runs as f64 / runs as f64,
        }
    }
    /// One-line human-readable summary.
    #[allow(dead_code)]
    pub fn format_summary(&self) -> String {
        format!(
            "Runs: {}/{}, Changes: {}, Time: {}ms",
            self.successful_runs, self.total_runs, self.total_changes, self.time_ms
        )
    }
}
312#[allow(dead_code)]
314#[derive(Debug, Clone)]
315pub struct LLVMExtWorklist {
316 pub(super) items: std::collections::VecDeque<usize>,
317 pub(super) present: Vec<bool>,
318}
319impl LLVMExtWorklist {
320 #[allow(dead_code)]
321 pub fn new(capacity: usize) -> Self {
322 Self {
323 items: std::collections::VecDeque::new(),
324 present: vec![false; capacity],
325 }
326 }
327 #[allow(dead_code)]
328 pub fn push(&mut self, id: usize) {
329 if id < self.present.len() && !self.present[id] {
330 self.present[id] = true;
331 self.items.push_back(id);
332 }
333 }
334 #[allow(dead_code)]
335 pub fn push_front(&mut self, id: usize) {
336 if id < self.present.len() && !self.present[id] {
337 self.present[id] = true;
338 self.items.push_front(id);
339 }
340 }
341 #[allow(dead_code)]
342 pub fn pop(&mut self) -> Option<usize> {
343 let id = self.items.pop_front()?;
344 if id < self.present.len() {
345 self.present[id] = false;
346 }
347 Some(id)
348 }
349 #[allow(dead_code)]
350 pub fn is_empty(&self) -> bool {
351 self.items.is_empty()
352 }
353 #[allow(dead_code)]
354 pub fn len(&self) -> usize {
355 self.items.len()
356 }
357 #[allow(dead_code)]
358 pub fn contains(&self, id: usize) -> bool {
359 id < self.present.len() && self.present[id]
360 }
361 #[allow(dead_code)]
362 pub fn drain_all(&mut self) -> Vec<usize> {
363 let v: Vec<usize> = self.items.drain(..).collect();
364 for &id in &v {
365 if id < self.present.len() {
366 self.present[id] = false;
367 }
368 }
369 v
370 }
371}
/// A complete LLVM module: header lines plus type aliases, globals,
/// functions and named metadata, rendered to text by `LlvmModule::emit`.
#[derive(Debug, Clone, Default)]
pub struct LlvmModule {
    /// Value for the `source_filename = "..."` line (omitted when empty).
    pub source_filename: String,
    /// Value for `target triple = "..."` (omitted when empty).
    pub target_triple: String,
    /// Value for `target datalayout = "..."` (omitted when empty).
    pub data_layout: String,
    /// Named type definitions, printed via their `Display` impl.
    pub type_aliases: Vec<LlvmTypeAlias>,
    /// Global definitions, printed via their `Display` impl.
    pub globals: Vec<LlvmGlobal>,
    /// Function definitions and declarations.
    pub functions: Vec<LlvmFunc>,
    /// Named metadata as (name, contents) pairs, printed as
    /// `!name = !{contents}`.
    pub metadata: Vec<(String, String)>,
}
390impl LlvmModule {
391 pub fn emit(&self) -> String {
393 let mut out = String::new();
394 if !self.source_filename.is_empty() {
395 out.push_str(&format!("source_filename = \"{}\"\n", self.source_filename));
396 }
397 if !self.target_triple.is_empty() {
398 out.push_str(&format!("target triple = \"{}\"\n", self.target_triple));
399 }
400 if !self.data_layout.is_empty() {
401 out.push_str(&format!("target datalayout = \"{}\"\n", self.data_layout));
402 }
403 if !self.source_filename.is_empty()
404 || !self.target_triple.is_empty()
405 || !self.data_layout.is_empty()
406 {
407 out.push('\n');
408 }
409 if !self.type_aliases.is_empty() {
410 for alias in &self.type_aliases {
411 out.push_str(&format!("{}\n", alias));
412 }
413 out.push('\n');
414 }
415 if !self.globals.is_empty() {
416 for global in &self.globals {
417 out.push_str(&format!("{}\n", global));
418 }
419 out.push('\n');
420 }
421 for func in &self.functions {
422 out.push_str(&format!("{}\n", func));
423 }
424 if !self.metadata.is_empty() {
425 out.push('\n');
426 for (name, val) in &self.metadata {
427 out.push_str(&format!("!{} = !{{{}}}\n", name, val));
428 }
429 }
430 out
431 }
432}
433#[allow(dead_code)]
435#[derive(Debug, Clone, Default)]
436pub struct LLVMExtConstFolder {
437 pub(super) folds: usize,
438 pub(super) failures: usize,
439 pub(super) enabled: bool,
440}
441impl LLVMExtConstFolder {
442 #[allow(dead_code)]
443 pub fn new() -> Self {
444 Self {
445 folds: 0,
446 failures: 0,
447 enabled: true,
448 }
449 }
450 #[allow(dead_code)]
451 pub fn add_i64(&mut self, a: i64, b: i64) -> Option<i64> {
452 self.folds += 1;
453 a.checked_add(b)
454 }
455 #[allow(dead_code)]
456 pub fn sub_i64(&mut self, a: i64, b: i64) -> Option<i64> {
457 self.folds += 1;
458 a.checked_sub(b)
459 }
460 #[allow(dead_code)]
461 pub fn mul_i64(&mut self, a: i64, b: i64) -> Option<i64> {
462 self.folds += 1;
463 a.checked_mul(b)
464 }
465 #[allow(dead_code)]
466 pub fn div_i64(&mut self, a: i64, b: i64) -> Option<i64> {
467 if b == 0 {
468 self.failures += 1;
469 None
470 } else {
471 self.folds += 1;
472 a.checked_div(b)
473 }
474 }
475 #[allow(dead_code)]
476 pub fn rem_i64(&mut self, a: i64, b: i64) -> Option<i64> {
477 if b == 0 {
478 self.failures += 1;
479 None
480 } else {
481 self.folds += 1;
482 a.checked_rem(b)
483 }
484 }
485 #[allow(dead_code)]
486 pub fn neg_i64(&mut self, a: i64) -> Option<i64> {
487 self.folds += 1;
488 a.checked_neg()
489 }
490 #[allow(dead_code)]
491 pub fn shl_i64(&mut self, a: i64, s: u32) -> Option<i64> {
492 if s >= 64 {
493 self.failures += 1;
494 None
495 } else {
496 self.folds += 1;
497 a.checked_shl(s)
498 }
499 }
500 #[allow(dead_code)]
501 pub fn shr_i64(&mut self, a: i64, s: u32) -> Option<i64> {
502 if s >= 64 {
503 self.failures += 1;
504 None
505 } else {
506 self.folds += 1;
507 a.checked_shr(s)
508 }
509 }
510 #[allow(dead_code)]
511 pub fn and_i64(&mut self, a: i64, b: i64) -> i64 {
512 self.folds += 1;
513 a & b
514 }
515 #[allow(dead_code)]
516 pub fn or_i64(&mut self, a: i64, b: i64) -> i64 {
517 self.folds += 1;
518 a | b
519 }
520 #[allow(dead_code)]
521 pub fn xor_i64(&mut self, a: i64, b: i64) -> i64 {
522 self.folds += 1;
523 a ^ b
524 }
525 #[allow(dead_code)]
526 pub fn not_i64(&mut self, a: i64) -> i64 {
527 self.folds += 1;
528 !a
529 }
530 #[allow(dead_code)]
531 pub fn fold_count(&self) -> usize {
532 self.folds
533 }
534 #[allow(dead_code)]
535 pub fn failure_count(&self) -> usize {
536 self.failures
537 }
538 #[allow(dead_code)]
539 pub fn enable(&mut self) {
540 self.enabled = true;
541 }
542 #[allow(dead_code)]
543 pub fn disable(&mut self) {
544 self.enabled = false;
545 }
546 #[allow(dead_code)]
547 pub fn is_enabled(&self) -> bool {
548 self.enabled
549 }
550}
/// Function/parameter attributes, named after LLVM attribute keywords.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LlvmAttr {
    NoUnwind,
    ReadOnly,
    WriteOnly,
    NoReturn,
    NoAlias,
    /// `align <n>` with the given byte alignment.
    Align(u32),
    /// `dereferenceable(<n>)` with the given byte count.
    Dereferenceable(u64),
    InlineHint,
    AlwaysInline,
    NoInline,
    Cold,
    OptSize,
    UwTable,
    StackProtect,
}
583#[allow(dead_code)]
584#[derive(Debug, Clone)]
585pub struct LLVMDepGraph {
586 pub(super) nodes: Vec<u32>,
587 pub(super) edges: Vec<(u32, u32)>,
588}
589impl LLVMDepGraph {
590 #[allow(dead_code)]
591 pub fn new() -> Self {
592 LLVMDepGraph {
593 nodes: Vec::new(),
594 edges: Vec::new(),
595 }
596 }
597 #[allow(dead_code)]
598 pub fn add_node(&mut self, id: u32) {
599 if !self.nodes.contains(&id) {
600 self.nodes.push(id);
601 }
602 }
603 #[allow(dead_code)]
604 pub fn add_dep(&mut self, dep: u32, dependent: u32) {
605 self.add_node(dep);
606 self.add_node(dependent);
607 self.edges.push((dep, dependent));
608 }
609 #[allow(dead_code)]
610 pub fn dependents_of(&self, node: u32) -> Vec<u32> {
611 self.edges
612 .iter()
613 .filter(|(d, _)| *d == node)
614 .map(|(_, dep)| *dep)
615 .collect()
616 }
617 #[allow(dead_code)]
618 pub fn dependencies_of(&self, node: u32) -> Vec<u32> {
619 self.edges
620 .iter()
621 .filter(|(_, dep)| *dep == node)
622 .map(|(d, _)| *d)
623 .collect()
624 }
625 #[allow(dead_code)]
626 pub fn topological_sort(&self) -> Vec<u32> {
627 let mut in_degree: std::collections::HashMap<u32, u32> = std::collections::HashMap::new();
628 for &n in &self.nodes {
629 in_degree.insert(n, 0);
630 }
631 for (_, dep) in &self.edges {
632 *in_degree.entry(*dep).or_insert(0) += 1;
633 }
634 let mut queue: std::collections::VecDeque<u32> = self
635 .nodes
636 .iter()
637 .filter(|&&n| in_degree[&n] == 0)
638 .copied()
639 .collect();
640 let mut result = Vec::new();
641 while let Some(node) = queue.pop_front() {
642 result.push(node);
643 for dep in self.dependents_of(node) {
644 let cnt = in_degree.entry(dep).or_insert(0);
645 *cnt = cnt.saturating_sub(1);
646 if *cnt == 0 {
647 queue.push_back(dep);
648 }
649 }
650 }
651 result
652 }
653 #[allow(dead_code)]
654 pub fn has_cycle(&self) -> bool {
655 self.topological_sort().len() < self.nodes.len()
656 }
657}
/// Pipeline phase in which an extended pass runs, in execution order
/// `Early < Middle < Late < Finalize`.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum LLVMExtPassPhase {
    Early,
    Middle,
    Late,
    Finalize,
}
impl LLVMExtPassPhase {
    /// All phases, indexed by their `order()` value.
    const ALL: [LLVMExtPassPhase; 4] = [Self::Early, Self::Middle, Self::Late, Self::Finalize];
    /// True for the `Early` phase.
    #[allow(dead_code)]
    pub fn is_early(&self) -> bool {
        *self == Self::Early
    }
    /// True for the `Middle` phase.
    #[allow(dead_code)]
    pub fn is_middle(&self) -> bool {
        *self == Self::Middle
    }
    /// True for the `Late` phase.
    #[allow(dead_code)]
    pub fn is_late(&self) -> bool {
        *self == Self::Late
    }
    /// True for the `Finalize` phase.
    #[allow(dead_code)]
    pub fn is_finalize(&self) -> bool {
        *self == Self::Finalize
    }
    /// Numeric position of this phase in the pipeline (0..=3).
    #[allow(dead_code)]
    pub fn order(&self) -> u32 {
        Self::ALL
            .iter()
            .position(|p| p == self)
            .expect("every phase appears in ALL") as u32
    }
    /// Inverse of `order`; `None` for values above 3.
    #[allow(dead_code)]
    pub fn from_order(n: u32) -> Option<Self> {
        Self::ALL.get(n as usize).cloned()
    }
}
/// A named type definition (type alias) emitted at module level.
#[derive(Debug, Clone, PartialEq)]
pub struct LlvmTypeAlias {
    /// Alias name.
    pub name: String,
    /// Aliased type.
    pub ty: LlvmType,
}
/// Operand values appearing in generated instructions.
#[derive(Debug, Clone, PartialEq)]
pub enum LlvmValue {
    /// Integer constant.
    Const(i64),
    /// Floating-point constant.
    Float(f64),
    Undef,
    Null,
    /// Boolean true/false; trailing underscores avoid Rust literals.
    True_,
    False_,
    /// Reference to a global symbol by name.
    GlobalRef(String),
    /// Reference to a local register/value by name.
    LocalRef(String),
    /// Constant array with element type and elements.
    ConstArray(LlvmType, Vec<LlvmValue>),
    /// Constant struct with field values.
    ConstStruct(Vec<LlvmValue>),
    ZeroInitializer,
}
/// Integer comparison predicates (for `icmp`-style instructions):
/// `Eq`/`Ne`, signed `S*`, and unsigned `U*` orderings.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum IcmpPred {
    Eq,
    Ne,
    Slt,
    Sgt,
    Sle,
    Sge,
    Ult,
    Ugt,
    Ule,
    Uge,
}
762#[allow(dead_code)]
764#[derive(Debug, Clone)]
765pub struct LLVMExtPassConfig {
766 pub name: String,
767 pub phase: LLVMExtPassPhase,
768 pub enabled: bool,
769 pub max_iterations: usize,
770 pub debug: u32,
771 pub timeout_ms: Option<u64>,
772}
773impl LLVMExtPassConfig {
774 #[allow(dead_code)]
775 pub fn new(name: impl Into<String>) -> Self {
776 Self {
777 name: name.into(),
778 phase: LLVMExtPassPhase::Middle,
779 enabled: true,
780 max_iterations: 100,
781 debug: 0,
782 timeout_ms: None,
783 }
784 }
785 #[allow(dead_code)]
786 pub fn with_phase(mut self, phase: LLVMExtPassPhase) -> Self {
787 self.phase = phase;
788 self
789 }
790 #[allow(dead_code)]
791 pub fn with_max_iter(mut self, n: usize) -> Self {
792 self.max_iterations = n;
793 self
794 }
795 #[allow(dead_code)]
796 pub fn with_debug(mut self, d: u32) -> Self {
797 self.debug = d;
798 self
799 }
800 #[allow(dead_code)]
801 pub fn disabled(mut self) -> Self {
802 self.enabled = false;
803 self
804 }
805 #[allow(dead_code)]
806 pub fn with_timeout(mut self, ms: u64) -> Self {
807 self.timeout_ms = Some(ms);
808 self
809 }
810 #[allow(dead_code)]
811 pub fn is_debug_enabled(&self) -> bool {
812 self.debug > 0
813 }
814}
815#[allow(dead_code)]
816#[derive(Debug, Clone)]
817pub struct LLVMAnalysisCache {
818 pub(super) entries: std::collections::HashMap<String, LLVMCacheEntry>,
819 pub(super) max_size: usize,
820 pub(super) hits: u64,
821 pub(super) misses: u64,
822}
823impl LLVMAnalysisCache {
824 #[allow(dead_code)]
825 pub fn new(max_size: usize) -> Self {
826 LLVMAnalysisCache {
827 entries: std::collections::HashMap::new(),
828 max_size,
829 hits: 0,
830 misses: 0,
831 }
832 }
833 #[allow(dead_code)]
834 pub fn get(&mut self, key: &str) -> Option<&LLVMCacheEntry> {
835 if self.entries.contains_key(key) {
836 self.hits += 1;
837 self.entries.get(key)
838 } else {
839 self.misses += 1;
840 None
841 }
842 }
843 #[allow(dead_code)]
844 pub fn insert(&mut self, key: String, data: Vec<u8>) {
845 if self.entries.len() >= self.max_size {
846 if let Some(oldest) = self.entries.keys().next().cloned() {
847 self.entries.remove(&oldest);
848 }
849 }
850 self.entries.insert(
851 key.clone(),
852 LLVMCacheEntry {
853 key,
854 data,
855 timestamp: 0,
856 valid: true,
857 },
858 );
859 }
860 #[allow(dead_code)]
861 pub fn invalidate(&mut self, key: &str) {
862 if let Some(entry) = self.entries.get_mut(key) {
863 entry.valid = false;
864 }
865 }
866 #[allow(dead_code)]
867 pub fn clear(&mut self) {
868 self.entries.clear();
869 }
870 #[allow(dead_code)]
871 pub fn hit_rate(&self) -> f64 {
872 let total = self.hits + self.misses;
873 if total == 0 {
874 return 0.0;
875 }
876 self.hits as f64 / total as f64
877 }
878 #[allow(dead_code)]
879 pub fn size(&self) -> usize {
880 self.entries.len()
881 }
882}
883#[allow(dead_code)]
884#[derive(Debug, Clone)]
885pub struct LLVMCacheEntry {
886 pub key: String,
887 pub data: Vec<u8>,
888 pub timestamp: u64,
889 pub valid: bool,
890}
891#[allow(dead_code)]
893#[derive(Debug)]
894pub struct LLVMExtCache {
895 pub(super) entries: Vec<(u64, Vec<u8>, bool, u32)>,
896 pub(super) cap: usize,
897 pub(super) total_hits: u64,
898 pub(super) total_misses: u64,
899}
900impl LLVMExtCache {
901 #[allow(dead_code)]
902 pub fn new(cap: usize) -> Self {
903 Self {
904 entries: Vec::new(),
905 cap,
906 total_hits: 0,
907 total_misses: 0,
908 }
909 }
910 #[allow(dead_code)]
911 pub fn get(&mut self, key: u64) -> Option<&[u8]> {
912 for e in self.entries.iter_mut() {
913 if e.0 == key && e.2 {
914 e.3 += 1;
915 self.total_hits += 1;
916 return Some(&e.1);
917 }
918 }
919 self.total_misses += 1;
920 None
921 }
922 #[allow(dead_code)]
923 pub fn put(&mut self, key: u64, data: Vec<u8>) {
924 if self.entries.len() >= self.cap {
925 self.entries.retain(|e| e.2);
926 if self.entries.len() >= self.cap {
927 self.entries.remove(0);
928 }
929 }
930 self.entries.push((key, data, true, 0));
931 }
932 #[allow(dead_code)]
933 pub fn invalidate(&mut self) {
934 for e in self.entries.iter_mut() {
935 e.2 = false;
936 }
937 }
938 #[allow(dead_code)]
939 pub fn hit_rate(&self) -> f64 {
940 let t = self.total_hits + self.total_misses;
941 if t == 0 {
942 0.0
943 } else {
944 self.total_hits as f64 / t as f64
945 }
946 }
947 #[allow(dead_code)]
948 pub fn live_count(&self) -> usize {
949 self.entries.iter().filter(|e| e.2).count()
950 }
951}
/// Dense dependency graph over `n` nodes (indices `0..n`) with forward
/// (`adj`) and reverse (`rev`) adjacency lists kept in sync by
/// `add_edge`.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct LLVMExtDepGraph {
    pub(super) n: usize,
    pub(super) adj: Vec<Vec<usize>>,
    pub(super) rev: Vec<Vec<usize>>,
    pub(super) edge_count: usize,
}
impl LLVMExtDepGraph {
    /// Creates a graph with `n` nodes and no edges.
    #[allow(dead_code)]
    pub fn new(n: usize) -> Self {
        Self {
            n,
            adj: vec![Vec::new(); n],
            rev: vec![Vec::new(); n],
            edge_count: 0,
        }
    }
    /// Adds the edge `from -> to`. Out-of-range endpoints and duplicate
    /// edges are silently ignored, so `edge_count` counts distinct edges.
    #[allow(dead_code)]
    pub fn add_edge(&mut self, from: usize, to: usize) {
        if from < self.n && to < self.n {
            if !self.adj[from].contains(&to) {
                self.adj[from].push(to);
                self.rev[to].push(from);
                self.edge_count += 1;
            }
        }
    }
    /// Successors of `n` (empty slice when out of range).
    #[allow(dead_code)]
    pub fn succs(&self, n: usize) -> &[usize] {
        self.adj.get(n).map(|v| v.as_slice()).unwrap_or(&[])
    }
    /// Predecessors of `n` (empty slice when out of range).
    #[allow(dead_code)]
    pub fn preds(&self, n: usize) -> &[usize] {
        self.rev.get(n).map(|v| v.as_slice()).unwrap_or(&[])
    }
    /// Kahn's-algorithm topological sort; `None` when the graph is
    /// cyclic (some node never reaches in-degree zero).
    #[allow(dead_code)]
    pub fn topo_sort(&self) -> Option<Vec<usize>> {
        // In-degree of each node, seeded from the reverse adjacency lists.
        let mut deg: Vec<usize> = (0..self.n).map(|i| self.rev[i].len()).collect();
        // Start from all nodes with no incoming edges.
        let mut q: std::collections::VecDeque<usize> =
            (0..self.n).filter(|&i| deg[i] == 0).collect();
        let mut out = Vec::with_capacity(self.n);
        while let Some(u) = q.pop_front() {
            out.push(u);
            for &v in &self.adj[u] {
                deg[v] -= 1;
                if deg[v] == 0 {
                    q.push_back(v);
                }
            }
        }
        // Nodes trapped in cycles never enter `out`.
        if out.len() == self.n {
            Some(out)
        } else {
            None
        }
    }
    /// True when the graph contains at least one cycle.
    #[allow(dead_code)]
    pub fn has_cycle(&self) -> bool {
        self.topo_sort().is_none()
    }
    /// Nodes reachable from `start` (inclusive, when in range), found by
    /// an iterative DFS over forward edges.
    #[allow(dead_code)]
    pub fn reachable(&self, start: usize) -> Vec<usize> {
        let mut vis = vec![false; self.n];
        let mut stk = vec![start];
        let mut out = Vec::new();
        while let Some(u) = stk.pop() {
            if u < self.n && !vis[u] {
                vis[u] = true;
                out.push(u);
                for &v in &self.adj[u] {
                    if !vis[v] {
                        stk.push(v);
                    }
                }
            }
        }
        out
    }
    /// Strongly connected components via Kosaraju's algorithm; both
    /// passes use explicit stacks to avoid recursion. Components are
    /// emitted in the order discovered by the second (reverse-graph)
    /// pass.
    #[allow(dead_code)]
    pub fn scc(&self) -> Vec<Vec<usize>> {
        // Pass 1: DFS on the forward graph, recording nodes in postorder
        // (finish order) into `order`.
        let mut visited = vec![false; self.n];
        let mut order = Vec::new();
        for i in 0..self.n {
            if !visited[i] {
                // Each stack entry is (node, index of next neighbor to visit).
                let mut stk = vec![(i, 0usize)];
                while let Some((u, idx)) = stk.last_mut() {
                    if !visited[*u] {
                        visited[*u] = true;
                    }
                    if *idx < self.adj[*u].len() {
                        let v = self.adj[*u][*idx];
                        *idx += 1;
                        if !visited[v] {
                            stk.push((v, 0));
                        }
                    } else {
                        // All successors explored: record the finish time.
                        order.push(*u);
                        stk.pop();
                    }
                }
            }
        }
        // Pass 2: scan nodes in reverse finish order; flood-fill each
        // unassigned node through REVERSE edges. Everything reached
        // belongs to the same strongly connected component.
        let mut comp = vec![usize::MAX; self.n];
        let mut components: Vec<Vec<usize>> = Vec::new();
        for &start in order.iter().rev() {
            if comp[start] == usize::MAX {
                let cid = components.len();
                let mut component = Vec::new();
                let mut stk = vec![start];
                while let Some(u) = stk.pop() {
                    if comp[u] == usize::MAX {
                        comp[u] = cid;
                        component.push(u);
                        for &v in &self.rev[u] {
                            if comp[v] == usize::MAX {
                                stk.push(v);
                            }
                        }
                    }
                }
                components.push(component);
            }
        }
        components
    }
    /// Number of nodes.
    #[allow(dead_code)]
    pub fn node_count(&self) -> usize {
        self.n
    }
    /// Number of distinct edges added.
    #[allow(dead_code)]
    pub fn edge_count(&self) -> usize {
        self.edge_count
    }
}
/// Code generator lowering LCNF declarations to LLVM IR structures.
pub struct LlvmBackend {
    /// Monotonic counter backing `fresh_reg`'s unique register names.
    pub(super) reg_counter: u64,
}
1095impl LlvmBackend {
1096 pub fn new() -> Self {
1098 LlvmBackend { reg_counter: 0 }
1099 }
1100 pub fn fresh_reg(&mut self) -> String {
1102 let r = format!("_r{}", self.reg_counter);
1103 self.reg_counter += 1;
1104 r
1105 }
1106 pub fn mangle_name(name: &str) -> String {
1109 name.chars()
1110 .map(|c| {
1111 if c.is_alphanumeric() || c == '_' {
1112 c
1113 } else {
1114 '_'
1115 }
1116 })
1117 .collect()
1118 }
1119 pub fn llvm_type_for(ty: &LcnfType) -> LlvmType {
1121 match ty {
1122 LcnfType::Erased => LlvmType::I64,
1123 LcnfType::Var(_) => LlvmType::Ptr,
1124 LcnfType::Fun(_, _) => LlvmType::Ptr,
1125 LcnfType::Ctor(_, _) => LlvmType::Ptr,
1126 LcnfType::Object => LlvmType::Ptr,
1127 LcnfType::Nat => LlvmType::I64,
1128 LcnfType::LcnfString => LlvmType::Ptr,
1129 LcnfType::Unit => LlvmType::I64,
1130 LcnfType::Irrelevant => LlvmType::I64,
1131 }
1132 }
1133 pub fn compile_decl(&mut self, decl: &LcnfFunDecl) -> LlvmFunc {
1135 let mangled_name = Self::mangle_name(&decl.name);
1136 let ret_ty = Self::llvm_type_for(&decl.ret_type);
1137 let params: Vec<(LlvmType, String)> = decl
1138 .params
1139 .iter()
1140 .map(|p| {
1141 let ty = Self::llvm_type_for(&p.ty);
1142 let name = format!("p{}", p.id.0);
1143 (ty, name)
1144 })
1145 .collect();
1146 let mut body: Vec<LlvmInstr> = Vec::new();
1147 self.compile_expr(&decl.body, &ret_ty, &mut body);
1148 LlvmFunc {
1149 name: mangled_name,
1150 ret_ty,
1151 params,
1152 body,
1153 linkage: LlvmLinkage::External,
1154 attrs: vec![LlvmAttr::NoUnwind],
1155 is_declare: false,
1156 }
1157 }
1158 pub(super) fn compile_expr(
1160 &mut self,
1161 expr: &LcnfExpr,
1162 ret_ty: &LlvmType,
1163 body: &mut Vec<LlvmInstr>,
1164 ) {
1165 match expr {
1166 LcnfExpr::Return(arg) => {
1167 let val = self.compile_arg(arg);
1168 if *ret_ty == LlvmType::Void {
1169 body.push(LlvmInstr::Ret(None));
1170 } else {
1171 body.push(LlvmInstr::Ret(Some((ret_ty.clone(), val))));
1172 }
1173 }
1174 LcnfExpr::Unreachable => {
1175 body.push(LlvmInstr::Unreachable);
1176 }
1177 LcnfExpr::Let {
1178 id,
1179 value,
1180 body: rest,
1181 ..
1182 } => {
1183 let reg = format!("x{}", id.0);
1184 self.compile_let_value(value, ®, body);
1185 self.compile_expr(rest, ret_ty, body);
1186 }
1187 LcnfExpr::Case {
1188 scrutinee,
1189 alts,
1190 default,
1191 ..
1192 } => {
1193 let scrut_val = LlvmValue::LocalRef(format!("x{}", scrutinee.0));
1194 let merge_label = format!("merge_{}", self.fresh_reg());
1195 if alts.is_empty() {
1196 if let Some(def) = default {
1197 self.compile_expr(def, ret_ty, body);
1198 } else {
1199 body.push(LlvmInstr::Unreachable);
1200 }
1201 return;
1202 }
1203 let alt_labels: Vec<String> = alts
1204 .iter()
1205 .enumerate()
1206 .map(|(i, a)| Self::mangle_name(&format!("case_{}_{}", a.ctor_name, i)))
1207 .collect();
1208 let default_label = format!("default_{}", self.fresh_reg());
1209 for (i, (alt, label)) in alts.iter().zip(alt_labels.iter()).enumerate() {
1210 let next_label = if i + 1 < alt_labels.len() {
1211 alt_labels[i + 1].clone()
1212 } else {
1213 default_label.clone()
1214 };
1215 let tag_reg = self.fresh_reg();
1216 body.push(LlvmInstr::ICmp {
1217 result: tag_reg.clone(),
1218 pred: IcmpPred::Eq,
1219 lhs: scrut_val.clone(),
1220 rhs: LlvmValue::Const(i as i64),
1221 });
1222 body.push(LlvmInstr::CondBr {
1223 cond: LlvmValue::LocalRef(tag_reg),
1224 true_: label.clone(),
1225 false_: next_label,
1226 });
1227 body.push(LlvmInstr::Label(label.clone()));
1228 self.compile_expr(&alt.body, ret_ty, body);
1229 body.push(LlvmInstr::Br(merge_label.clone()));
1230 }
1231 body.push(LlvmInstr::Label(default_label));
1232 if let Some(def) = default {
1233 self.compile_expr(def, ret_ty, body);
1234 } else {
1235 body.push(LlvmInstr::Unreachable);
1236 }
1237 body.push(LlvmInstr::Br(merge_label.clone()));
1238 body.push(LlvmInstr::Label(merge_label));
1239 }
1240 LcnfExpr::TailCall(func_arg, args) => {
1241 let func_name = match func_arg {
1242 LcnfArg::Var(id) => format!("x{}", id.0),
1243 _ => "unknown".to_string(),
1244 };
1245 let mangled = Self::mangle_name(&func_name);
1246 let compiled_args: Vec<(LlvmType, LlvmValue)> = args
1247 .iter()
1248 .map(|a| (LlvmType::I64, self.compile_arg(a)))
1249 .collect();
1250 let result_reg = if *ret_ty == LlvmType::Void {
1251 None
1252 } else {
1253 Some(self.fresh_reg())
1254 };
1255 let result_clone = result_reg.clone();
1256 body.push(LlvmInstr::Call {
1257 result: result_reg,
1258 ret_ty: ret_ty.clone(),
1259 func: mangled,
1260 args: compiled_args,
1261 });
1262 if let Some(r) = result_clone {
1263 body.push(LlvmInstr::Ret(Some((
1264 ret_ty.clone(),
1265 LlvmValue::LocalRef(r),
1266 ))));
1267 } else {
1268 body.push(LlvmInstr::Ret(None));
1269 }
1270 }
1271 }
1272 }
1273 pub(super) fn compile_let_value(
1275 &mut self,
1276 val: &LcnfLetValue,
1277 reg: &str,
1278 body: &mut Vec<LlvmInstr>,
1279 ) {
1280 match val {
1281 LcnfLetValue::Lit(lit) => {
1282 let const_val = match lit {
1283 LcnfLit::Nat(n) => LlvmValue::Const(*n as i64),
1284 LcnfLit::Str(_) => LlvmValue::Null,
1285 };
1286 body.push(LlvmInstr::Add {
1287 result: reg.to_string(),
1288 lhs: const_val,
1289 rhs: LlvmValue::Const(0),
1290 });
1291 }
1292 LcnfLetValue::FVar(id) => {
1293 body.push(LlvmInstr::Add {
1294 result: reg.to_string(),
1295 lhs: LlvmValue::LocalRef(format!("x{}", id.0)),
1296 rhs: LlvmValue::Const(0),
1297 });
1298 }
1299 LcnfLetValue::App(func_arg, args) => {
1300 let func_name = match func_arg {
1301 LcnfArg::Var(id) => format!("x{}", id.0),
1302 _ => "unknown".to_string(),
1303 };
1304 let mangled = Self::mangle_name(&func_name);
1305 let compiled_args: Vec<(LlvmType, LlvmValue)> = args
1306 .iter()
1307 .map(|a| (LlvmType::I64, self.compile_arg(a)))
1308 .collect();
1309 body.push(LlvmInstr::Call {
1310 result: Some(reg.to_string()),
1311 ret_ty: LlvmType::I64,
1312 func: mangled,
1313 args: compiled_args,
1314 });
1315 }
1316 LcnfLetValue::Ctor(name, _tag, args) => {
1317 let mangled = format!("ctor_{}", Self::mangle_name(name));
1318 let compiled_args: Vec<(LlvmType, LlvmValue)> = args
1319 .iter()
1320 .map(|a| (LlvmType::I64, self.compile_arg(a)))
1321 .collect();
1322 body.push(LlvmInstr::Call {
1323 result: Some(reg.to_string()),
1324 ret_ty: LlvmType::I64,
1325 func: mangled,
1326 args: compiled_args,
1327 });
1328 }
1329 LcnfLetValue::Proj(_name, idx, base) => {
1330 let gep_reg = self.fresh_reg();
1331 body.push(LlvmInstr::GetElementPtr {
1332 result: gep_reg.clone(),
1333 base_ty: LlvmType::I64,
1334 ptr: LlvmValue::LocalRef(format!("x{}", base.0)),
1335 indices: vec![
1336 (LlvmType::I32, LlvmValue::Const(0)),
1337 (LlvmType::I32, LlvmValue::Const(*idx as i64)),
1338 ],
1339 });
1340 body.push(LlvmInstr::Load {
1341 result: reg.to_string(),
1342 ty: LlvmType::I64,
1343 ptr: LlvmValue::LocalRef(gep_reg),
1344 align: Some(8),
1345 });
1346 }
1347 LcnfLetValue::Reset(var) => {
1348 body.push(LlvmInstr::Add {
1349 result: reg.to_string(),
1350 lhs: LlvmValue::LocalRef(format!("x{}", var.0)),
1351 rhs: LlvmValue::Const(0),
1352 });
1353 }
1354 LcnfLetValue::Reuse(_slot, name, _tag, args) => {
1355 let mangled = format!("ctor_{}", Self::mangle_name(name));
1356 let compiled_args: Vec<(LlvmType, LlvmValue)> = args
1357 .iter()
1358 .map(|a| (LlvmType::I64, self.compile_arg(a)))
1359 .collect();
1360 body.push(LlvmInstr::Call {
1361 result: Some(reg.to_string()),
1362 ret_ty: LlvmType::I64,
1363 func: mangled,
1364 args: compiled_args,
1365 });
1366 }
1367 LcnfLetValue::Erased => {
1368 body.push(LlvmInstr::Add {
1369 result: reg.to_string(),
1370 lhs: LlvmValue::Const(0),
1371 rhs: LlvmValue::Const(0),
1372 });
1373 }
1374 }
1375 }
1376 pub(super) fn compile_arg(&self, arg: &LcnfArg) -> LlvmValue {
1378 match arg {
1379 LcnfArg::Var(id) => LlvmValue::LocalRef(format!("x{}", id.0)),
1380 LcnfArg::Lit(LcnfLit::Nat(n)) => LlvmValue::Const(*n as i64),
1381 LcnfArg::Lit(LcnfLit::Str(_)) => LlvmValue::Null,
1382 LcnfArg::Erased => LlvmValue::Const(0),
1383 LcnfArg::Type(_) => LlvmValue::Const(0),
1384 }
1385 }
1386 pub fn emit_module(&mut self, decls: &[LcnfFunDecl]) -> Result<String, String> {
1388 let mut module = LlvmModule {
1389 source_filename: "oxilean_output.ll".to_string(),
1390 target_triple: "x86_64-unknown-linux-gnu".to_string(),
1391 data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
1392 .to_string(),
1393 ..Default::default()
1394 };
1395 for decl in decls {
1396 let func = self.compile_decl(decl);
1397 module.functions.push(func);
1398 }
1399 Ok(module.emit())
1400 }
1401}
1402#[allow(dead_code)]
1403pub struct LLVMPassRegistry {
1404 pub(super) configs: Vec<LLVMPassConfig>,
1405 pub(super) stats: std::collections::HashMap<String, LLVMPassStats>,
1406}
1407impl LLVMPassRegistry {
1408 #[allow(dead_code)]
1409 pub fn new() -> Self {
1410 LLVMPassRegistry {
1411 configs: Vec::new(),
1412 stats: std::collections::HashMap::new(),
1413 }
1414 }
1415 #[allow(dead_code)]
1416 pub fn register(&mut self, config: LLVMPassConfig) {
1417 self.stats
1418 .insert(config.pass_name.clone(), LLVMPassStats::new());
1419 self.configs.push(config);
1420 }
1421 #[allow(dead_code)]
1422 pub fn enabled_passes(&self) -> Vec<&LLVMPassConfig> {
1423 self.configs.iter().filter(|c| c.enabled).collect()
1424 }
1425 #[allow(dead_code)]
1426 pub fn get_stats(&self, name: &str) -> Option<&LLVMPassStats> {
1427 self.stats.get(name)
1428 }
1429 #[allow(dead_code)]
1430 pub fn total_passes(&self) -> usize {
1431 self.configs.len()
1432 }
1433 #[allow(dead_code)]
1434 pub fn enabled_count(&self) -> usize {
1435 self.enabled_passes().len()
1436 }
1437 #[allow(dead_code)]
1438 pub fn update_stats(&mut self, name: &str, changes: u64, time_ms: u64, iter: u32) {
1439 if let Some(stats) = self.stats.get_mut(name) {
1440 stats.record_run(changes, time_ms, iter);
1441 }
1442 }
1443}
1444#[allow(dead_code)]
1445#[derive(Debug, Clone)]
1446pub struct LLVMWorklist {
1447 pub(super) items: std::collections::VecDeque<u32>,
1448 pub(super) in_worklist: std::collections::HashSet<u32>,
1449}
1450impl LLVMWorklist {
1451 #[allow(dead_code)]
1452 pub fn new() -> Self {
1453 LLVMWorklist {
1454 items: std::collections::VecDeque::new(),
1455 in_worklist: std::collections::HashSet::new(),
1456 }
1457 }
1458 #[allow(dead_code)]
1459 pub fn push(&mut self, item: u32) -> bool {
1460 if self.in_worklist.insert(item) {
1461 self.items.push_back(item);
1462 true
1463 } else {
1464 false
1465 }
1466 }
1467 #[allow(dead_code)]
1468 pub fn pop(&mut self) -> Option<u32> {
1469 let item = self.items.pop_front()?;
1470 self.in_worklist.remove(&item);
1471 Some(item)
1472 }
1473 #[allow(dead_code)]
1474 pub fn is_empty(&self) -> bool {
1475 self.items.is_empty()
1476 }
1477 #[allow(dead_code)]
1478 pub fn len(&self) -> usize {
1479 self.items.len()
1480 }
1481 #[allow(dead_code)]
1482 pub fn contains(&self, item: u32) -> bool {
1483 self.in_worklist.contains(&item)
1484 }
1485}
/// Stateless helpers for folding constant operations at compile time.
///
/// Integer helpers return `None` when the operation cannot be folded
/// safely (overflow, division by zero, or an out-of-range shift),
/// mirroring the `checked_*` family in `std`. Float and bitwise helpers
/// are total and return the value directly.
#[allow(dead_code)]
pub struct LLVMConstantFoldingHelper;
impl LLVMConstantFoldingHelper {
    /// `a + b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_add_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_add(b)
    }
    /// `a - b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_sub_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_sub(b)
    }
    /// `a * b`, or `None` on overflow.
    #[allow(dead_code)]
    pub fn fold_mul_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_mul(b)
    }
    /// `a / b`, or `None` if `b == 0` or the quotient overflows
    /// (`i64::MIN / -1`). `checked_div` covers both cases, so no
    /// separate zero guard is needed.
    #[allow(dead_code)]
    pub fn fold_div_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_div(b)
    }
    /// `a + b` in f64 (IEEE-754 semantics; never fails).
    #[allow(dead_code)]
    pub fn fold_add_f64(a: f64, b: f64) -> f64 {
        a + b
    }
    /// `a * b` in f64 (IEEE-754 semantics; never fails).
    #[allow(dead_code)]
    pub fn fold_mul_f64(a: f64, b: f64) -> f64 {
        a * b
    }
    /// `-a`, or `None` on overflow (`-i64::MIN`).
    #[allow(dead_code)]
    pub fn fold_neg_i64(a: i64) -> Option<i64> {
        a.checked_neg()
    }
    /// Logical negation.
    #[allow(dead_code)]
    pub fn fold_not_bool(a: bool) -> bool {
        !a
    }
    /// Logical AND.
    #[allow(dead_code)]
    pub fn fold_and_bool(a: bool, b: bool) -> bool {
        a && b
    }
    /// Logical OR.
    #[allow(dead_code)]
    pub fn fold_or_bool(a: bool, b: bool) -> bool {
        a || b
    }
    /// `a << b`, or `None` when the shift amount is >= 64.
    #[allow(dead_code)]
    pub fn fold_shl_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shl(b)
    }
    /// `a >> b` (arithmetic, since `a` is signed), or `None` when the
    /// shift amount is >= 64.
    #[allow(dead_code)]
    pub fn fold_shr_i64(a: i64, b: u32) -> Option<i64> {
        a.checked_shr(b)
    }
    /// `a % b`, or `None` if `b == 0` or the remainder overflows
    /// (`i64::MIN % -1`). Raw `%` panics on the overflowing case in
    /// debug builds, so the checked form is required here, matching
    /// `fold_div_i64`.
    #[allow(dead_code)]
    pub fn fold_rem_i64(a: i64, b: i64) -> Option<i64> {
        a.checked_rem(b)
    }
    /// Bitwise AND.
    #[allow(dead_code)]
    pub fn fold_bitand_i64(a: i64, b: i64) -> i64 {
        a & b
    }
    /// Bitwise OR.
    #[allow(dead_code)]
    pub fn fold_bitor_i64(a: i64, b: i64) -> i64 {
        a | b
    }
    /// Bitwise XOR.
    #[allow(dead_code)]
    pub fn fold_bitxor_i64(a: i64, b: i64) -> i64 {
        a ^ b
    }
    /// Bitwise complement.
    #[allow(dead_code)]
    pub fn fold_bitnot_i64(a: i64) -> i64 {
        !a
    }
}
/// Dominator tree with explicit parent (idom), children, and depth arrays,
/// all indexed by node id. Out-of-range queries degrade to safe defaults
/// rather than panicking, and chain walks are step-bounded so malformed
/// (cyclic) idom data cannot hang.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct LLVMExtDomTree {
    // idom[n]: immediate dominator of n; None for roots / unset nodes.
    pub(super) idom: Vec<Option<usize>>,
    // children[d]: nodes whose immediate dominator is d.
    pub(super) children: Vec<Vec<usize>>,
    // depth[n]: distance from the root along idom edges (root = 0).
    pub(super) depth: Vec<usize>,
}
impl LLVMExtDomTree {
    /// Build an empty tree over `n` nodes (no idoms set, all depths 0).
    #[allow(dead_code)]
    pub fn new(n: usize) -> Self {
        Self {
            idom: vec![None; n],
            children: vec![Vec::new(); n],
            depth: vec![0; n],
        }
    }
    /// Record `dom` as the immediate dominator of `node`, keeping the
    /// children and depth arrays in sync. An out-of-range `node` is
    /// silently ignored.
    #[allow(dead_code)]
    pub fn set_idom(&mut self, node: usize, dom: usize) {
        if node < self.idom.len() {
            self.idom[node] = Some(dom);
            if dom < self.children.len() {
                self.children[dom].push(node);
            }
            // All three vecs are allocated with the same length, so the
            // else-branch (depth 1 fallback for an out-of-range dom) should
            // be unreachable in practice; kept as a defensive default.
            self.depth[node] = if dom < self.depth.len() {
                self.depth[dom] + 1
            } else {
                1
            };
        }
    }
    /// True if `a` dominates `b` (reflexively). Walks the idom chain from
    /// `b` upward, bounded by the node count so that malformed chains
    /// terminate with `false`.
    #[allow(dead_code)]
    pub fn dominates(&self, a: usize, mut b: usize) -> bool {
        if a == b {
            return true;
        }
        let n = self.idom.len();
        // A valid idom chain has fewer than n links.
        for _ in 0..n {
            match self.idom.get(b).copied().flatten() {
                None => return false,
                Some(p) if p == a => return true,
                // Self-loop: reached a root-like fixpoint without finding a.
                Some(p) if p == b => return false,
                Some(p) => b = p,
            }
        }
        false
    }
    /// Dominator-tree children of `n`; empty slice for out-of-range nodes.
    #[allow(dead_code)]
    pub fn children_of(&self, n: usize) -> &[usize] {
        self.children.get(n).map(|v| v.as_slice()).unwrap_or(&[])
    }
    /// Depth of `n` in the tree; 0 for out-of-range nodes.
    #[allow(dead_code)]
    pub fn depth_of(&self, n: usize) -> usize {
        self.depth.get(n).copied().unwrap_or(0)
    }
    /// Lowest common ancestor of `a` and `b`: repeatedly lift whichever
    /// node is deeper. Bounded by 2n steps; returns 0 as a fallback when
    /// no common ancestor is reached (e.g. disconnected or malformed data).
    #[allow(dead_code)]
    pub fn lca(&self, mut a: usize, mut b: usize) -> usize {
        let n = self.idom.len();
        for _ in 0..(2 * n) {
            if a == b {
                return a;
            }
            // `unwrap_or(self)` keeps the node in place when it has no idom;
            // the other side keeps lifting until depths meet.
            if self.depth_of(a) > self.depth_of(b) {
                a = self.idom.get(a).and_then(|x| *x).unwrap_or(a);
            } else {
                b = self.idom.get(b).and_then(|x| *x).unwrap_or(b);
            }
        }
        0
    }
}
/// A module-level LLVM global variable definition or declaration.
#[derive(Debug, Clone, PartialEq)]
pub struct LlvmGlobal {
    /// Symbol name of the global.
    pub name: String,
    /// Value type of the global.
    pub ty: LlvmType,
    /// Linkage (private, internal, external, ...).
    pub linkage: LlvmLinkage,
    /// Whether the global is immutable (LLVM `constant` vs `global`).
    pub is_constant: bool,
    /// Optional initializer value; `None` presumably means a bare
    /// declaration — confirm against the emitter.
    pub init: Option<LlvmValue>,
    /// Optional alignment in bytes.
    pub align: Option<u32>,
}
/// Metrics accumulated while a single pass runs: iteration and node
/// counters, a change flag, elapsed time, peak memory, and error count.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct LLVMExtPassStats {
    pub iterations: usize,
    pub changed: bool,
    pub nodes_visited: usize,
    pub nodes_modified: usize,
    pub time_ms: u64,
    pub memory_bytes: usize,
    pub errors: usize,
}
impl LLVMExtPassStats {
    /// All-zero stats.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Default::default()
    }
    /// Count one node visit.
    #[allow(dead_code)]
    pub fn visit(&mut self) {
        self.nodes_visited += 1;
    }
    /// Count one node modification and mark the pass as having changed
    /// something.
    #[allow(dead_code)]
    pub fn modify(&mut self) {
        self.nodes_modified += 1;
        self.changed = true;
    }
    /// Count one pass iteration.
    #[allow(dead_code)]
    pub fn iterate(&mut self) {
        self.iterations += 1;
    }
    /// Count one error.
    #[allow(dead_code)]
    pub fn error(&mut self) {
        self.errors += 1;
    }
    /// Fraction of visited nodes that were modified; 0.0 when nothing
    /// has been visited yet.
    #[allow(dead_code)]
    pub fn efficiency(&self) -> f64 {
        match self.nodes_visited {
            0 => 0.0,
            visited => self.nodes_modified as f64 / visited as f64,
        }
    }
    /// Fold another stats record into this one: counters add up,
    /// `changed` ORs, and `memory_bytes` keeps the peak of the two.
    #[allow(dead_code)]
    pub fn merge(&mut self, o: &LLVMExtPassStats) {
        self.iterations += o.iterations;
        self.changed = self.changed || o.changed;
        self.nodes_visited += o.nodes_visited;
        self.nodes_modified += o.nodes_modified;
        self.time_ms += o.time_ms;
        if o.memory_bytes > self.memory_bytes {
            self.memory_bytes = o.memory_bytes;
        }
        self.errors += o.errors;
    }
}
/// One LLVM IR instruction in the subset this backend emits.
///
/// `result` fields hold the destination SSA register name; operand
/// fields are `LlvmValue`s (constants, local refs, etc.).
#[derive(Debug, Clone, PartialEq)]
pub enum LlvmInstr {
    /// Stack allocation: `alloca ty`, with optional alignment.
    Alloca {
        result: String,
        ty: LlvmType,
        align: Option<u32>,
    },
    /// Memory read: `load ty, ptr`, with optional alignment.
    Load {
        result: String,
        ty: LlvmType,
        ptr: LlvmValue,
        align: Option<u32>,
    },
    /// Memory write: `store ty val, ptr`; produces no result.
    Store {
        val: LlvmValue,
        ty: LlvmType,
        ptr: LlvmValue,
        align: Option<u32>,
    },
    /// Integer addition.
    Add {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Integer subtraction.
    Sub {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Integer multiplication.
    Mul {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Signed integer division.
    SDiv {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Signed integer remainder.
    SRem {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Floating-point addition.
    FAdd {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Floating-point subtraction.
    FSub {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Floating-point multiplication.
    FMul {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Floating-point division.
    FDiv {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Bitwise AND.
    And {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Bitwise OR.
    Or {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Bitwise XOR.
    Xor {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Shift left.
    Shl {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Logical (zero-filling) shift right.
    LShr {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Arithmetic (sign-preserving) shift right.
    AShr {
        result: String,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Integer comparison with the given predicate, producing an i1.
    ICmp {
        result: String,
        pred: IcmpPred,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Floating-point comparison with the given predicate, producing an i1.
    FCmp {
        result: String,
        pred: FcmpPred,
        lhs: LlvmValue,
        rhs: LlvmValue,
    },
    /// Unconditional branch to the named label.
    Br(String),
    /// Conditional branch: jump to `true_` or `false_` based on `cond`.
    CondBr {
        cond: LlvmValue,
        true_: String,
        false_: String,
    },
    /// Function return, optionally carrying a typed value (`None` = `ret void`).
    Ret(Option<(LlvmType, LlvmValue)>),
    /// `unreachable` terminator.
    Unreachable,
    /// Basic-block label definition.
    Label(String),
    /// Direct call to `func`; `result` is `None` for void calls.
    Call {
        result: Option<String>,
        ret_ty: LlvmType,
        func: String,
        args: Vec<(LlvmType, LlvmValue)>,
    },
    /// Address computation: `getelementptr base_ty, ptr` with typed indices.
    GetElementPtr {
        result: String,
        base_ty: LlvmType,
        ptr: LlvmValue,
        indices: Vec<(LlvmType, LlvmValue)>,
    },
    /// Bit-pattern-preserving cast between types.
    BitCast {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
    /// SSA phi node merging (value, predecessor-label) pairs.
    Phi {
        result: String,
        ty: LlvmType,
        incoming: Vec<(LlvmValue, String)>,
    },
    /// Value selection: `select cond, true_val, false_val`.
    Select {
        result: String,
        cond: LlvmValue,
        true_val: LlvmValue,
        false_val: LlvmValue,
        ty: LlvmType,
    },
    /// Extract an element from an aggregate value at the given index path.
    ExtractValue {
        result: String,
        agg: LlvmValue,
        agg_ty: LlvmType,
        indices: Vec<u32>,
    },
    /// Insert a value into an aggregate at the given index path.
    InsertValue {
        result: String,
        agg: LlvmValue,
        agg_ty: LlvmType,
        val: LlvmValue,
        val_ty: LlvmType,
        indices: Vec<u32>,
    },
    /// Zero-extending integer widening cast.
    ZExt {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
    /// Sign-extending integer widening cast.
    SExt {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
    /// Integer narrowing cast.
    Trunc {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
    /// Float-to-signed-integer conversion.
    FpToSI {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
    /// Signed-integer-to-float conversion.
    SIToFp {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
    /// Float precision widening (e.g. float -> double).
    FpExt {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
    /// Float precision narrowing (e.g. double -> float).
    FpTrunc {
        result: String,
        val: LlvmValue,
        from_ty: LlvmType,
        to_ty: LlvmType,
    },
}
/// Lifecycle phase an optimization pass belongs to.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum LLVMPassPhase {
    Analysis,
    Transformation,
    Verification,
    Cleanup,
}
impl LLVMPassPhase {
    /// Lower-case identifier for this phase, suitable for logs and
    /// diagnostics.
    #[allow(dead_code)]
    pub fn name(&self) -> &str {
        match self {
            Self::Analysis => "analysis",
            Self::Transformation => "transformation",
            Self::Verification => "verification",
            Self::Cleanup => "cleanup",
        }
    }
    /// Whether passes in this phase may mutate the IR (transformation and
    /// cleanup do; analysis and verification are read-only).
    #[allow(dead_code)]
    pub fn is_modifying(&self) -> bool {
        match self {
            Self::Transformation | Self::Cleanup => true,
            Self::Analysis | Self::Verification => false,
        }
    }
}
1971#[allow(dead_code)]
1972#[derive(Debug, Clone)]
1973pub struct LLVMPassConfig {
1974 pub phase: LLVMPassPhase,
1975 pub enabled: bool,
1976 pub max_iterations: u32,
1977 pub debug_output: bool,
1978 pub pass_name: String,
1979}
1980impl LLVMPassConfig {
1981 #[allow(dead_code)]
1982 pub fn new(name: impl Into<String>, phase: LLVMPassPhase) -> Self {
1983 LLVMPassConfig {
1984 phase,
1985 enabled: true,
1986 max_iterations: 10,
1987 debug_output: false,
1988 pass_name: name.into(),
1989 }
1990 }
1991 #[allow(dead_code)]
1992 pub fn disabled(mut self) -> Self {
1993 self.enabled = false;
1994 self
1995 }
1996 #[allow(dead_code)]
1997 pub fn with_debug(mut self) -> Self {
1998 self.debug_output = true;
1999 self
2000 }
2001 #[allow(dead_code)]
2002 pub fn max_iter(mut self, n: u32) -> Self {
2003 self.max_iterations = n;
2004 self
2005 }
2006}
/// Minimal dominator tree keyed by `u32` block ids.
///
/// Unlike `LLVMExtDomTree`, `set_idom` here maintains only `idom`;
/// `dom_children` and `dom_depth` are public and left for callers to
/// populate directly.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct LLVMDominatorTree {
    /// Immediate dominator of each node; `None` for roots / unset nodes.
    pub idom: Vec<Option<u32>>,
    /// Dominator-tree children per node (caller-populated).
    pub dom_children: Vec<Vec<u32>>,
    /// Depth of each node in the dominator tree (caller-populated).
    pub dom_depth: Vec<u32>,
}
impl LLVMDominatorTree {
    /// Create a tree over `size` nodes with no idoms set and all depths 0.
    #[allow(dead_code)]
    pub fn new(size: usize) -> Self {
        LLVMDominatorTree {
            idom: vec![None; size],
            dom_children: vec![Vec::new(); size],
            dom_depth: vec![0; size],
        }
    }
    /// Record `idom` as the immediate dominator of `node`. An out-of-range
    /// `node` is ignored instead of panicking, matching
    /// `LLVMExtDomTree::set_idom`.
    #[allow(dead_code)]
    pub fn set_idom(&mut self, node: usize, idom: u32) {
        if let Some(slot) = self.idom.get_mut(node) {
            *slot = Some(idom);
        }
    }
    /// True if `a` dominates `b` (reflexively), walking the idom chain
    /// upward from `b`.
    ///
    /// The walk is bounded by the node count: any valid idom chain has
    /// fewer links than there are nodes, so a malformed idom graph with a
    /// cycle (the previous version only detected self-loops and could
    /// spin forever on longer cycles) terminates with `false`.
    /// Out-of-range nodes also yield `false` instead of panicking.
    #[allow(dead_code)]
    pub fn dominates(&self, a: usize, b: usize) -> bool {
        if a == b {
            return true;
        }
        let mut cur = b;
        for _ in 0..self.idom.len() {
            match self.idom.get(cur).copied().flatten() {
                Some(parent) if parent as usize == a => return true,
                Some(parent) if parent as usize == cur => return false,
                Some(parent) => cur = parent as usize,
                None => return false,
            }
        }
        false
    }
    /// Depth of `node` in the dominator tree; 0 for out-of-range nodes.
    #[allow(dead_code)]
    pub fn depth(&self, node: usize) -> u32 {
        self.dom_depth.get(node).copied().unwrap_or(0)
    }
}