use rustc_hash::FxHashMap;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use std::cmp::Ordering;
use std::collections::VecDeque;
use std::fmt;
use std::hash::Hash;
use std::marker::PhantomData;
use std::ops::Index;
use std::ops::IndexMut;
use std::slice::{Iter, IterMut};
use crate::{Function, RegUsageMapper};
#[cfg(feature = "enable-serde")]
use serde::{Deserialize, Serialize};
/// FIFO work queue used by the allocator's iterative algorithms.
pub type Queue<T> = VecDeque<T>;
/// Fast non-cryptographic hash map (FxHash); keys here are compiler-internal,
/// not attacker-controlled, so DoS-resistance is not needed.
pub type Map<K, V> = FxHashMap<K, V>;
/// A thin set wrapper over `FxHashSet` providing the small set-algebra
/// vocabulary (union, intersect, difference, subset tests) the allocator uses.
#[derive(Clone)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct Set<T: Eq + Hash> {
    // Backing store: FxHashSet (fast, non-cryptographic hashing).
    set: FxHashSet<T>,
}
impl<T: Eq + Ord + Hash + Copy + fmt::Debug> Set<T> {
    /// Creates an empty set.
    #[inline(never)]
    pub fn empty() -> Self {
        Self {
            set: FxHashSet::<T>::default(),
        }
    }

    /// Creates a set containing exactly `item`.
    #[inline(never)]
    pub fn unit(item: T) -> Self {
        let mut result = Self::empty();
        result.insert(item);
        result
    }

    /// Creates a set containing `item1` and `item2` (deduplicated if equal).
    #[inline(never)]
    pub fn two(item1: T, item2: T) -> Self {
        let mut result = Self::empty();
        result.insert(item1);
        result.insert(item2);
        result
    }

    /// Cardinality: the number of elements in the set.
    #[inline(never)]
    pub fn card(&self) -> usize {
        self.set.len()
    }

    #[inline(never)]
    pub fn insert(&mut self, item: T) {
        self.set.insert(item);
    }

    #[inline(never)]
    pub fn delete(&mut self, item: T) {
        self.set.remove(&item);
    }

    #[inline(never)]
    pub fn is_empty(&self) -> bool {
        self.set.is_empty()
    }

    #[inline(never)]
    pub fn contains(&self, item: T) -> bool {
        self.set.contains(&item)
    }

    /// In-place intersection: keep only elements also present in `other`.
    #[inline(never)]
    pub fn intersect(&mut self, other: &Self) {
        self.set.retain(|item| other.set.contains(item));
    }

    /// In-place union: add every element of `other`.
    #[inline(never)]
    pub fn union(&mut self, other: &Self) {
        self.set.extend(other.set.iter().copied());
    }

    /// In-place difference: drop every element present in `other`.
    #[inline(never)]
    pub fn remove(&mut self, other: &Self) {
        for item in other.set.iter() {
            self.set.remove(item);
        }
    }

    /// True when the two sets share at least one element.
    #[inline(never)]
    pub fn intersects(&self, other: &Self) -> bool {
        !self.set.is_disjoint(&other.set)
    }

    #[inline(never)]
    pub fn is_subset_of(&self, other: &Self) -> bool {
        self.set.is_subset(&other.set)
    }

    /// Returns the elements as a sorted vector, giving a deterministic order
    /// despite the hash-based storage.
    #[inline(never)]
    pub fn to_vec(&self) -> Vec<T> {
        let mut result: Vec<T> = self.set.iter().copied().collect();
        result.sort_unstable();
        result
    }

    #[inline(never)]
    pub fn from_vec(vec: Vec<T>) -> Self {
        Self {
            set: vec.into_iter().collect(),
        }
    }

    #[inline(never)]
    pub fn equals(&self, other: &Self) -> bool {
        self.set == other.set
    }

    /// Keeps only the elements for which `f` returns true.
    #[inline(never)]
    pub fn retain<F>(&mut self, f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.set.retain(f)
    }

    /// Returns a new set obtained by applying `f` to every element.
    #[inline(never)]
    pub fn map<F, U>(&self, f: F) -> Set<U>
    where
        F: Fn(&T) -> U,
        U: Eq + Ord + Hash + Copy + fmt::Debug,
    {
        Set {
            set: self.set.iter().map(f).collect(),
        }
    }

    /// Returns a new set of the `Some` results of `f` over all elements.
    #[inline(never)]
    pub fn filter_map<F, U>(&self, f: F) -> Set<U>
    where
        F: Fn(&T) -> Option<U>,
        U: Eq + Ord + Hash + Copy + fmt::Debug,
    {
        Set {
            set: self.set.iter().filter_map(f).collect(),
        }
    }

    pub fn clear(&mut self) {
        self.set.clear();
    }
}
impl<T: Eq + Ord + Hash + Copy + fmt::Debug> fmt::Debug for Set<T> {
    /// Prints the set as `{a, b, c}` in sorted order (deterministic output).
    ///
    /// Rewritten to write directly into the formatter: the previous version
    /// built the output with repeated `String` concatenation, which is O(n²)
    /// and allocates a fresh `String` per element.
    #[inline(never)]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let sorted_vec = self.to_vec();
        write!(fmt, "{{")?;
        for (i, item) in sorted_vec.iter().enumerate() {
            if i > 0 {
                write!(fmt, ", ")?;
            }
            write!(fmt, "{:?}", item)?;
        }
        write!(fmt, "}}")
    }
}
/// Borrowing iterator over a `Set`'s elements, in unspecified (hash) order.
pub struct SetIter<'a, T> {
    inner: std::collections::hash_set::Iter<'a, T>,
}
impl<T: Eq + Hash> Set<T> {
    /// Iterates over the elements; order is unspecified (use `to_vec` for a
    /// deterministic order).
    pub fn iter(&self) -> SetIter<T> {
        SetIter {
            inner: self.set.iter(),
        }
    }
}
impl<'a, T> Iterator for SetIter<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}
/// Types with a distinguished zero value (used for index types).
pub trait Zero {
    fn zero() -> Self;
}
/// Types that can be advanced by `n` positions.
pub trait PlusN {
    fn plus_n(&self, n: usize) -> Self;
}
/// A dense range `[first, first + len)` over an index-like type.
#[derive(Clone, Copy)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct Range<T> {
    first: T,
    len: usize,
}
impl<T: Copy + PartialOrd + PlusN> IntoIterator for Range<T> {
    type Item = T;
    type IntoIter = MyIterator<T>;
    fn into_iter(self) -> Self::IntoIter {
        MyIterator {
            range: self,
            cursor: self.first,
        }
    }
}
impl<T: Copy + Eq + Ord + PlusN> Range<T> {
    /// Builds the range `[from, from + len)`.
    pub fn new(from: T, len: usize) -> Range<T> {
        Range { first: from, len }
    }
    /// The lower bound; valid even for an empty range.
    pub fn start(&self) -> T {
        self.first
    }
    /// The first element; panics when the range is empty.
    pub fn first(&self) -> T {
        assert!(self.len() > 0);
        self.start()
    }
    /// The last element; panics when the range is empty.
    pub fn last(&self) -> T {
        assert!(self.len() > 0);
        self.start().plus_n(self.len() - 1)
    }
    /// One past the last element — the exclusive upper bound.
    pub fn last_plus1(&self) -> T {
        self.start().plus_n(self.len())
    }
    pub fn len(&self) -> usize {
        self.len
    }
    /// True when `t` lies within `[first, first + len)`.
    pub fn contains(&self, t: T) -> bool {
        self.first <= t && t < self.last_plus1()
    }
}
/// Iteration state for `Range<T>`: yields successive values up to the bound.
pub struct MyIterator<T> {
    range: Range<T>,
    cursor: T,
}
impl<T: Copy + PartialOrd + PlusN> Iterator for MyIterator<T> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        let limit = self.range.first.plus_n(self.range.len);
        if self.cursor >= limit {
            None
        } else {
            let item = self.cursor;
            self.cursor = self.cursor.plus_n(1);
            Some(item)
        }
    }
}
/// A `Vec<Ty>` addressed by a strongly-typed 32-bit index `TyIx` rather than
/// a raw `usize`, so indices from different index spaces cannot be mixed up.
pub struct TypedIxVec<TyIx, Ty> {
    vek: Vec<Ty>,
    // Zero-sized marker binding the vector to its index type.
    ty_ix: PhantomData<TyIx>,
}
impl<TyIx, Ty> TypedIxVec<TyIx, Ty>
where
    Ty: Clone,
    TyIx: Copy + Eq + Ord + Zero + PlusN + Into<u32>,
{
    /// Creates an empty vector.
    pub fn new() -> Self {
        Self {
            vek: Vec::new(),
            ty_ix: PhantomData::<TyIx>,
        }
    }
    /// Wraps an existing `Vec` without copying.
    pub fn from_vec(vek: Vec<Ty>) -> Self {
        Self {
            vek,
            ty_ix: PhantomData::<TyIx>,
        }
    }
    /// Moves all elements of `other` to the end of `self`, emptying `other`.
    pub fn append(&mut self, other: &mut TypedIxVec<TyIx, Ty>) {
        self.vek.append(&mut other.vek);
    }
    pub fn iter(&self) -> Iter<Ty> {
        self.vek.iter()
    }
    pub fn iter_mut(&mut self) -> IterMut<Ty> {
        self.vek.iter_mut()
    }
    /// Length as `u32`, matching the 32-bit index types.
    /// NOTE(review): `as u32` silently truncates if the vector ever exceeds
    /// u32::MAX elements — callers are assumed to stay within 32-bit range.
    pub fn len(&self) -> u32 {
        self.vek.len() as u32
    }
    pub fn push(&mut self, item: Ty) {
        self.vek.push(item);
    }
    /// Resizes to `new_len`, filling any new slots with clones of `value`.
    pub fn resize(&mut self, new_len: u32, value: Ty) {
        self.vek.resize(new_len as usize, value);
    }
    pub fn reserve(&mut self, additional: usize) {
        self.vek.reserve(additional);
    }
    /// The elements as a plain slice.
    pub fn elems(&self) -> &[Ty] {
        &self.vek[..]
    }
    pub fn elems_mut(&mut self) -> &mut [Ty] {
        &mut self.vek[..]
    }
    /// The full index range `[zero, len)` as a typed `Range`.
    pub fn range(&self) -> Range<TyIx> {
        Range::new(TyIx::zero(), self.len() as usize)
    }
    /// Removes and returns the element at `idx`, shifting later elements
    /// down (O(n), like `Vec::remove`).
    pub fn remove(&mut self, idx: TyIx) -> Ty {
        self.vek.remove(idx.into() as usize)
    }
    pub fn sort_by<F: FnMut(&Ty, &Ty) -> Ordering>(&mut self, compare: F) {
        self.vek.sort_by(compare)
    }
    pub fn sort_unstable_by<F: FnMut(&Ty, &Ty) -> Ordering>(&mut self, compare: F) {
        self.vek.sort_unstable_by(compare)
    }
    pub fn clear(&mut self) {
        self.vek.clear();
    }
}
impl<TyIx, Ty> Index<TyIx> for TypedIxVec<TyIx, Ty>
where
    TyIx: Into<u32>,
{
    type Output = Ty;
    // Typed index -> raw index; panics on out-of-bounds like `Vec`.
    fn index(&self, ix: TyIx) -> &Ty {
        &self.vek[ix.into() as usize]
    }
}
impl<TyIx, Ty> IndexMut<TyIx> for TypedIxVec<TyIx, Ty>
where
    TyIx: Into<u32>,
{
    fn index_mut(&mut self, ix: TyIx) -> &mut Ty {
        &mut self.vek[ix.into() as usize]
    }
}
// Manual Clone: a derive would add an unnecessary `TyIx: Clone` bound
// (`PhantomData<TyIx>` is Clone regardless of `TyIx`).
impl<TyIx, Ty> Clone for TypedIxVec<TyIx, Ty>
where
    Ty: Clone,
{
    fn clone(&self) -> Self {
        Self {
            vek: self.vek.clone(),
            ty_ix: PhantomData::<TyIx>,
        }
    }
}
/// Generates a strongly-typed u32 index newtype `$TypeIx` for entity type
/// `$Type`, debug-printed with `$PrintingPrefix` (e.g. "i3" = instruction 3).
/// The value u32::MAX is reserved as the "invalid" sentinel.
/// (`$Type` itself is currently unused inside the expansion.)
macro_rules! generate_boilerplate {
    ($TypeIx:ident, $Type:ident, $PrintingPrefix:expr) => {
        // Single-variant enum wrapping a u32; behaves like a newtype.
        #[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
        #[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
        pub enum $TypeIx {
            $TypeIx(u32),
        }
        impl $TypeIx {
            // u32::MAX is reserved for invalid_value(), hence the assert.
            #[allow(dead_code)]
            #[inline(always)]
            pub fn new(n: u32) -> Self {
                debug_assert!(n != u32::max_value());
                Self::$TypeIx(n)
            }
            // Largest representable *valid* index (one below the sentinel).
            #[allow(dead_code)]
            #[inline(always)]
            pub const fn max_value() -> Self {
                Self::$TypeIx(u32::max_value() - 1)
            }
            #[allow(dead_code)]
            #[inline(always)]
            pub const fn min_value() -> Self {
                Self::$TypeIx(u32::min_value())
            }
            // The reserved "no index" sentinel (u32::MAX).
            #[allow(dead_code)]
            #[inline(always)]
            pub const fn invalid_value() -> Self {
                Self::$TypeIx(u32::max_value())
            }
            #[allow(dead_code)]
            #[inline(always)]
            pub fn is_valid(self) -> bool {
                self != Self::invalid_value()
            }
            #[allow(dead_code)]
            #[inline(always)]
            pub fn is_invalid(self) -> bool {
                self == Self::invalid_value()
            }
            // Extracts the raw u32; debug-asserts the index is valid.
            #[allow(dead_code)]
            #[inline(always)]
            pub fn get(self) -> u32 {
                debug_assert!(self.is_valid());
                match self {
                    $TypeIx::$TypeIx(n) => n,
                }
            }
            // Arithmetic uses plain +/-, so overflow panics in debug builds
            // and wraps in release, like ordinary u32 arithmetic.
            #[allow(dead_code)]
            #[inline(always)]
            pub fn plus(self, delta: u32) -> $TypeIx {
                debug_assert!(self.is_valid());
                $TypeIx::$TypeIx(self.get() + delta)
            }
            #[allow(dead_code)]
            #[inline(always)]
            pub fn minus(self, delta: u32) -> $TypeIx {
                debug_assert!(self.is_valid());
                $TypeIx::$TypeIx(self.get() - delta)
            }
            // Half-open range [self, last_plus1) as a typed `Range`.
            #[allow(dead_code)]
            pub fn dotdot(&self, last_plus1: $TypeIx) -> Range<$TypeIx> {
                debug_assert!(self.is_valid());
                let len = (last_plus1.get() - self.get()) as usize;
                Range::new(*self, len)
            }
        }
        impl fmt::Debug for $TypeIx {
            fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
                if self.is_invalid() {
                    write!(fmt, "{}<NONE>", $PrintingPrefix)
                } else {
                    write!(fmt, "{}{}", $PrintingPrefix, &self.get())
                }
            }
        }
        impl PlusN for $TypeIx {
            #[inline(always)]
            fn plus_n(&self, n: usize) -> Self {
                debug_assert!(self.is_valid());
                self.plus(n as u32)
            }
        }
        // NOTE(review): idiomatic Rust would implement `From<$TypeIx> for u32`
        // (which yields `Into` for free); left as-is to avoid churn.
        impl Into<u32> for $TypeIx {
            #[inline(always)]
            fn into(self) -> u32 {
                debug_assert!(self.is_valid());
                self.get()
            }
        }
        impl Zero for $TypeIx {
            #[inline(always)]
            fn zero() -> Self {
                $TypeIx::new(0)
            }
        }
    };
}
// Instantiate the strongly-typed 32-bit index types used throughout the
// allocator, each with its debug-printing prefix.
generate_boilerplate!(InstIx, Inst, "i");
generate_boilerplate!(BlockIx, Block, "b");
generate_boilerplate!(RangeFragIx, RangeFrag, "f");
generate_boilerplate!(VirtualRangeIx, VirtualRange, "vr");
generate_boilerplate!(RealRangeIx, RealRange, "rr");
impl<TyIx, Ty: fmt::Debug> fmt::Debug for TypedIxVec<TyIx, Ty> {
    /// Prints just the underlying vector, eliding the index-type marker.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{:?}", self.vek)
    }
}
/// The register classes the allocator distinguishes.
///
/// The numeric discriminants are significant: `Reg` packs a class into a
/// 3-bit field, and `rc_from_u32` relies on this exact encoding.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub enum RegClass {
    I32 = 0,
    F32 = 1,
    I64 = 2,
    F64 = 3,
    V128 = 4,
    INVALID = 5,
}
/// Number of usable register classes; deliberately excludes `INVALID`.
pub const NUM_REG_CLASSES: usize = 5;
impl RegClass {
    /// The class's packed numeric encoding.
    #[inline(always)]
    pub fn rc_to_u32(self) -> u32 {
        self as u32
    }
    #[inline(always)]
    pub fn rc_to_usize(self) -> usize {
        self as usize
    }
    /// Inverse of `rc_to_u32`; panics on an out-of-range encoding
    /// (including the `INVALID` discriminant, 5).
    #[inline(always)]
    pub fn rc_from_u32(rc: u32) -> RegClass {
        match rc {
            0 => RegClass::I32,
            1 => RegClass::F32,
            2 => RegClass::I64,
            3 => RegClass::F64,
            4 => RegClass::V128,
            _ => panic!("RegClass::rc_from_u32"),
        }
    }
    /// One-letter name, used in compact `Reg` debug output.
    pub fn short_name(self) -> &'static str {
        match self {
            RegClass::I32 => "I",
            RegClass::I64 => "J",
            RegClass::F32 => "F",
            RegClass::F64 => "D",
            RegClass::V128 => "V",
            RegClass::INVALID => panic!("RegClass::short_name"),
        }
    }
    /// Full class name.
    pub fn long_name(self) -> &'static str {
        match self {
            RegClass::I32 => "I32",
            // BUGFIX: I64/F64 previously returned "I32"/"F32".
            RegClass::I64 => "I64",
            RegClass::F32 => "F32",
            RegClass::F64 => "F64",
            RegClass::V128 => "V128",
            RegClass::INVALID => panic!("RegClass::long_name"),
        }
    }
}
/// A register, real (hardware) or virtual, packed into 32 bits.
///
/// Bit layout (established by the constructors and accessors below):
///   bit 31      : 1 = virtual, 0 = real
///   bits 30..28 : register class (see `RegClass`)
///   virtual     : bits 27..0 = virtual index
///   real        : bits 15..8 = hardware encoding, bits 7..0 = index
/// The all-ones pattern (`INVALID_REG`) is the "no register" sentinel.
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct Reg {
    bits: u32,
}
// Sentinel bit pattern meaning "no register".
static INVALID_REG: u32 = 0xffffffff;
impl Reg {
    /// True for a valid register with the virtual bit (31) set.
    #[inline(always)]
    pub fn is_virtual(self) -> bool {
        self.is_valid() && (self.bits & 0x8000_0000) != 0
    }
    /// True for a valid register with the virtual bit clear.
    #[inline(always)]
    pub fn is_real(self) -> bool {
        self.is_valid() && (self.bits & 0x8000_0000) == 0
    }
    /// Packs a real register: class in bits 30..28, hardware encoding in
    /// bits 15..8, index in bits 7..0.
    pub fn new_real(rc: RegClass, enc: u8, index: u8) -> Self {
        let n = (0 << 31) | (rc.rc_to_u32() << 28) | ((enc as u32) << 8) | ((index as u32) << 0);
        Reg { bits: n }
    }
    /// Packs a virtual register: virtual bit set, class in bits 30..28,
    /// index in bits 27..0. Panics if the index needs more than 28 bits.
    pub fn new_virtual(rc: RegClass, index: u32) -> Self {
        if index >= (1 << 28) {
            panic!("new_virtual(): index too large");
        }
        let n = (1 << 31) | (rc.rc_to_u32() << 28) | (index << 0);
        Reg { bits: n }
    }
    /// The "no register" sentinel.
    pub fn invalid() -> Reg {
        Reg { bits: INVALID_REG }
    }
    #[inline(always)]
    pub fn is_invalid(self) -> bool {
        self.bits == INVALID_REG
    }
    #[inline(always)]
    pub fn is_valid(self) -> bool {
        !self.is_invalid()
    }
    pub fn is_virtual_or_invalid(self) -> bool {
        self.is_virtual() || self.is_invalid()
    }
    pub fn is_real_or_invalid(self) -> bool {
        self.is_real() || self.is_invalid()
    }
    /// Extracts the class from bits 30..28; debug-asserts validity.
    #[inline(always)]
    pub fn get_class(self) -> RegClass {
        debug_assert!(self.is_valid());
        RegClass::rc_from_u32((self.bits >> 28) & 0x7)
    }
    /// The index: 28 bits for virtual registers, 8 bits for real ones.
    #[inline(always)]
    pub fn get_index(self) -> usize {
        debug_assert!(self.is_valid());
        if self.is_virtual() {
            (self.bits & ((1 << 28) - 1)) as usize
        } else {
            (self.bits & ((1 << 8) - 1)) as usize
        }
    }
    /// Same as `get_index`, but as `u32`.
    #[inline(always)]
    pub fn get_index_u32(self) -> u32 {
        debug_assert!(self.is_valid());
        if self.is_virtual() {
            self.bits & ((1 << 28) - 1)
        } else {
            self.bits & ((1 << 8) - 1)
        }
    }
    /// The hardware encoding (bits 15..8); panics on virtual registers.
    pub fn get_hw_encoding(self) -> u8 {
        debug_assert!(self.is_valid());
        if self.is_virtual() {
            panic!("Virtual register does not have a hardware encoding")
        } else {
            ((self.bits >> 8) & ((1 << 8) - 1)) as u8
        }
    }
    /// Wraps as `VirtualReg` when virtual; note that the invalid sentinel is
    /// also accepted (returns `Some` wrapping the invalid bits).
    pub fn as_virtual_reg(self) -> Option<VirtualReg> {
        if self.is_virtual_or_invalid() {
            Some(VirtualReg { reg: self })
        } else {
            None
        }
    }
    /// Wraps as `RealReg` when real; the invalid sentinel is also accepted.
    pub fn as_real_reg(self) -> Option<RealReg> {
        if self.is_real_or_invalid() {
            Some(RealReg { reg: self })
        } else {
            None
        }
    }
    /// Human-readable name: the universe's name for in-range real registers,
    /// otherwise the `Debug` rendering, or "rINVALID" for the sentinel.
    pub fn show_with_rru(self, univ: &RealRegUniverse) -> String {
        if self.is_real() && self.get_index() < univ.regs.len() {
            univ.regs[self.get_index()].1.clone()
        } else if self.is_valid() {
            format!("{:?}", self)
        } else {
            "rINVALID".to_string()
        }
    }
}
impl fmt::Debug for Reg {
    /// Prints e.g. "v12I" (virtual, index 12, class I32) or "r3F";
    /// the invalid sentinel prints as "rINVALID".
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        if self.is_invalid() {
            return write!(fmt, "rINVALID");
        }
        let prefix = if self.is_virtual() { "v" } else { "r" };
        write!(
            fmt,
            "{}{}{}",
            prefix,
            self.get_index(),
            self.get_class().short_name(),
        )
    }
}
/// A `Reg` statically known to be real (hardware); a thin newtype over `Reg`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct RealReg {
    reg: Reg,
}
impl Reg {
    /// Wraps this register as a `RealReg`; panics on virtual registers.
    /// NOTE(review): the invalid sentinel is not virtual, so it passes
    /// through unchanged here — confirm that is intended.
    pub fn to_real_reg(self) -> RealReg {
        if self.is_virtual() {
            panic!("Reg::to_real_reg: this is a virtual register")
        } else {
            RealReg { reg: self }
        }
    }
}
impl RealReg {
    pub fn get_class(self) -> RegClass {
        self.reg.get_class()
    }
    #[inline(always)]
    pub fn get_index(self) -> usize {
        self.reg.get_index()
    }
    /// The hardware encoding, widened to `usize`.
    pub fn get_hw_encoding(self) -> usize {
        usize::from(self.reg.get_hw_encoding())
    }
    /// Unwraps back to the untyped `Reg`.
    #[inline(always)]
    pub fn to_reg(self) -> Reg {
        self.reg
    }
    /// The "no register" sentinel, wrapped as a `RealReg`.
    pub fn invalid() -> RealReg {
        RealReg {
            reg: Reg::invalid(),
        }
    }
    pub fn is_valid(self) -> bool {
        self.reg.is_valid()
    }
    pub fn is_invalid(self) -> bool {
        self.reg.is_invalid()
    }
    /// `Some(self)` when valid, `None` for the invalid sentinel.
    pub fn maybe_valid(self) -> Option<RealReg> {
        if self.is_invalid() {
            None
        } else {
            Some(self)
        }
    }
}
impl fmt::Debug for RealReg {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{:?}", self.reg)
    }
}
/// A `Reg` statically known to be virtual; a thin newtype over `Reg`.
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct VirtualReg {
    reg: Reg,
}
impl Reg {
    /// Wraps this register as a `VirtualReg`; panics on real registers.
    /// Note the invalid sentinel also panics here (it is not virtual),
    /// unlike `to_real_reg` which lets it through.
    #[inline(always)]
    pub fn to_virtual_reg(self) -> VirtualReg {
        if self.is_virtual() {
            VirtualReg { reg: self }
        } else {
            panic!("Reg::to_virtual_reg: this is a real register")
        }
    }
}
impl VirtualReg {
    pub fn get_class(self) -> RegClass {
        self.reg.get_class()
    }
    #[inline(always)]
    pub fn get_index(self) -> usize {
        self.reg.get_index()
    }
    /// Unwraps back to the untyped `Reg`.
    #[inline(always)]
    pub fn to_reg(self) -> Reg {
        self.reg
    }
    /// The "no register" sentinel, wrapped as a `VirtualReg`.
    pub fn invalid() -> VirtualReg {
        VirtualReg {
            reg: Reg::invalid(),
        }
    }
    pub fn is_valid(self) -> bool {
        self.reg.is_valid()
    }
    pub fn is_invalid(self) -> bool {
        self.reg.is_invalid()
    }
    /// `Some(self)` when valid, `None` for the invalid sentinel.
    pub fn maybe_valid(self) -> Option<VirtualReg> {
        if self.is_invalid() {
            None
        } else {
            Some(self)
        }
    }
}
impl fmt::Debug for VirtualReg {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{:?}", self.reg)
    }
}
impl Reg {
    /// Rewrites this register (if virtual) using the mapper's "use" mapping.
    pub fn apply_uses<RUM: RegUsageMapper>(&mut self, mapper: &RUM) {
        self.apply(|vreg| mapper.get_use(vreg));
    }
    /// Rewrites this register (if virtual) using the mapper's "def" mapping.
    pub fn apply_defs<RUM: RegUsageMapper>(&mut self, mapper: &RUM) {
        self.apply(|vreg| mapper.get_def(vreg));
    }
    /// Rewrites this register (if virtual) using the mapper's "mod" mapping.
    pub fn apply_mods<RUM: RegUsageMapper>(&mut self, mapper: &RUM) {
        self.apply(|vreg| mapper.get_mod(vreg));
    }
    // Shared rewriting core: real registers are left untouched; a virtual
    // register is replaced by its mapped real register, and a missing
    // mapping is a bug, hence the panic. The class must be preserved.
    fn apply<F: Fn(VirtualReg) -> Option<RealReg>>(&mut self, f: F) {
        if let Some(vreg) = self.as_virtual_reg() {
            if let Some(rreg) = f(vreg) {
                debug_assert!(rreg.get_class() == vreg.get_class());
                *self = rreg.to_reg();
            } else {
                panic!("Reg::apply: no mapping for {:?}", self);
            }
        }
    }
}
/// Wrapper marking a register as being written (defined or modified),
/// so that write-capability is explicit in instruction signatures.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct Writable<R: WritableBase> {
    reg: R,
}
/// The register-like types that may appear inside `Writable`; implemented
/// below for exactly `Reg`, `RealReg` and `VirtualReg`.
pub trait WritableBase:
    Copy + Clone + PartialEq + Eq + Hash + PartialOrd + Ord + fmt::Debug
{
}
impl WritableBase for Reg {}
impl WritableBase for RealReg {}
impl WritableBase for VirtualReg {}
impl<R: WritableBase> Writable<R> {
    /// Wraps `reg`, asserting (by construction) that it will be written.
    #[inline(always)]
    pub fn from_reg(reg: R) -> Writable<R> {
        Writable { reg }
    }
    /// Unwraps the underlying register.
    pub fn to_reg(&self) -> R {
        self.reg
    }
    /// Applies `f` to the wrapped register, preserving writability.
    pub fn map<F, U>(&self, f: F) -> Writable<U>
    where
        F: Fn(R) -> U,
        U: WritableBase,
    {
        Writable { reg: f(self.reg) }
    }
}
/// A spill-stack slot number.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SpillSlot(u32);
impl SpillSlot {
    #[inline(always)]
    pub fn new(n: u32) -> Self {
        Self(n)
    }
    #[inline(always)]
    pub fn get(self) -> u32 {
        self.0
    }
    #[inline(always)]
    pub fn get_usize(self) -> usize {
        self.get() as usize
    }
    /// Rounds this slot number up to the next multiple of `num_slots`
    /// (a no-op when already aligned). Panics if `num_slots` is zero.
    pub fn round_up(self, num_slots: u32) -> SpillSlot {
        assert!(num_slots > 0);
        let rounded = (self.get() + num_slots - 1) / num_slots * num_slots;
        SpillSlot::new(rounded)
    }
    /// The slot `num_slots` places further along.
    pub fn inc(self, num_slots: u32) -> SpillSlot {
        SpillSlot::new(self.get() + num_slots)
    }
}
impl fmt::Debug for SpillSlot {
    /// Prints as "S<n>", e.g. "S7".
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "S{}", self.get())
    }
}
/// Accumulates the registers an instruction reads (uses), writes (defs),
/// and read-modify-writes (mods) into a borrowed `RegVecs`.
pub struct RegUsageCollector<'a> {
    pub reg_vecs: &'a mut RegVecs,
}
impl<'a> RegUsageCollector<'a> {
    pub fn new(reg_vecs: &'a mut RegVecs) -> Self {
        Self { reg_vecs }
    }
    pub fn add_use(&mut self, r: Reg) {
        self.reg_vecs.uses.push(r);
    }
    pub fn add_uses(&mut self, regs: &[Reg]) {
        self.reg_vecs.uses.extend(regs.iter());
    }
    pub fn add_def(&mut self, r: Writable<Reg>) {
        self.reg_vecs.defs.push(r.to_reg());
    }
    pub fn add_defs(&mut self, regs: &[Writable<Reg>]) {
        self.reg_vecs
            .defs
            .extend(regs.iter().map(|r| r.to_reg()));
    }
    pub fn add_mod(&mut self, r: Writable<Reg>) {
        self.reg_vecs.mods.push(r.to_reg());
    }
    pub fn add_mods(&mut self, regs: &[Writable<Reg>]) {
        self.reg_vecs
            .mods
            .extend(regs.iter().map(|r| r.to_reg()));
    }
    /// Test-framework access only: clones out the three vectors.
    pub fn get_use_def_mod_vecs_test_framework_only(&self) -> (Vec<Reg>, Vec<Reg>, Vec<Reg>) {
        (
            self.reg_vecs.uses.clone(),
            self.reg_vecs.defs.clone(),
            self.reg_vecs.mods.clone(),
        )
    }
    /// Test-framework access only: builds an empty `RegVecs`.
    pub fn get_empty_reg_vecs_test_framework_only(sanitized: bool) -> RegVecs {
        RegVecs::new(sanitized)
    }
}
/// Flattened per-function use/def/mod register vectors, plus a flag recording
/// whether they have been through sanitization.
#[derive(Debug)]
pub struct RegVecs {
    pub uses: Vec<Reg>,
    pub defs: Vec<Reg>,
    pub mods: Vec<Reg>,
    sanitized: bool,
}
impl RegVecs {
    pub fn new(sanitized: bool) -> Self {
        Self {
            uses: Vec::new(),
            defs: Vec::new(),
            mods: Vec::new(),
            sanitized,
        }
    }
    pub fn is_sanitized(&self) -> bool {
        self.sanitized
    }
    pub fn set_sanitized(&mut self, sanitized: bool) {
        self.sanitized = sanitized;
    }
    /// Empties all three vectors (capacity is retained).
    pub fn clear(&mut self) {
        self.uses.clear();
        self.defs.clear();
        self.mods.clear();
    }
}
/// One instruction's slice boundaries within the flattened `RegVecs`:
/// start offsets (u32) and lengths (u8, so at most 255 regs per kind).
#[derive(Clone, Debug)]
pub struct RegVecBounds {
    pub uses_start: u32,
    pub defs_start: u32,
    pub mods_start: u32,
    pub uses_len: u8,
    pub defs_len: u8,
    pub mods_len: u8,
}
impl RegVecBounds {
    /// All-zero bounds: empty slices at offset 0.
    pub fn new() -> Self {
        RegVecBounds {
            uses_start: 0,
            uses_len: 0,
            defs_start: 0,
            defs_len: 0,
            mods_start: 0,
            mods_len: 0,
        }
    }
}
/// Per-function register-usage data: the flattened use/def/mod vectors plus,
/// for each instruction, the bounds of its slices within those vectors.
pub struct RegVecsAndBounds {
    /// Flattened uses/defs/mods for all instructions.
    pub vecs: RegVecs,
    /// One bounds record per instruction, indexed by `InstIx`.
    pub bounds: TypedIxVec<InstIx, RegVecBounds>,
}
impl RegVecsAndBounds {
    pub fn new(vecs: RegVecs, bounds: TypedIxVec<InstIx, RegVecBounds>) -> Self {
        Self { vecs, bounds }
    }
    /// Forwards the underlying vectors' sanitized flag.
    pub fn is_sanitized(&self) -> bool {
        self.vecs.sanitized
    }
    /// Number of instructions covered (one bounds record each).
    #[allow(dead_code)] pub fn num_insns(&self) -> u32 {
        self.bounds.len()
    }
}
/// Set-valued counterpart of `RegVecs`: one instruction's uses/defs/mods
/// as sets rather than vector slices.
#[derive(Debug)]
pub struct RegSets {
    pub uses: Set<Reg>,
    pub defs: Set<Reg>,
    pub mods: Set<Reg>,
    // Mirrors the sanitized flag of the vectors these sets were built from.
    sanitized: bool,
}
impl RegSets {
    pub fn new(sanitized: bool) -> Self {
        Self {
            uses: Set::<Reg>::empty(),
            defs: Set::<Reg>::empty(),
            mods: Set::<Reg>::empty(),
            sanitized,
        }
    }
    pub fn is_sanitized(&self) -> bool {
        self.sanitized
    }
}
impl RegVecsAndBounds {
    /// Collects the use/def/mod registers of instruction `iix` into fresh
    /// sets, using the per-instruction bounds to slice the flat vectors.
    #[inline(never)]
    pub fn get_reg_sets_for_iix(&self, iix: InstIx) -> RegSets {
        let bounds = &self.bounds[iix];
        let mut regsets = RegSets::new(self.vecs.sanitized);
        let uses_start = bounds.uses_start as usize;
        for &reg in &self.vecs.uses[uses_start..uses_start + bounds.uses_len as usize] {
            regsets.uses.insert(reg);
        }
        let defs_start = bounds.defs_start as usize;
        for &reg in &self.vecs.defs[defs_start..defs_start + bounds.defs_len as usize] {
            regsets.defs.insert(reg);
        }
        let mods_start = bounds.mods_start as usize;
        for &reg in &self.vecs.mods[mods_start..mods_start + bounds.mods_len as usize] {
            regsets.mods.insert(reg);
        }
        regsets
    }
}
/// The set of real registers available to the allocator, with their names
/// and per-class allocatable sub-ranges.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct RealRegUniverse {
    /// All registers; each register's position equals its `get_index()`.
    pub regs: Vec<(RealReg, String)>,
    /// `regs[0 .. allocable]` may be allocated; the tail may not.
    pub allocable: usize,
    /// Per-class inclusive index range within `regs`, if the class has any
    /// allocatable registers.
    pub allocable_by_class: [Option<RegClassInfo>; NUM_REG_CLASSES],
}
/// Inclusive `[first, last]` index range in `RealRegUniverse::regs` for one
/// class, plus an optional suggested scratch register inside that range.
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct RegClassInfo {
    pub first: usize,
    pub last: usize,
    pub suggested_scratch: Option<usize>,
}
impl RealRegUniverse {
    /// Renders a human-readable summary: one string per present class
    /// (suggested scratch registers marked with "*"), plus one string for
    /// the non-allocatable tail, if any.
    pub fn show(&self) -> Vec<String> {
        let mut res = vec![];
        // One summary line per class that has allocatable registers.
        for class_num in 0..NUM_REG_CLASSES {
            let class_info = match &self.allocable_by_class[class_num] {
                None => continue,
                Some(info) => info,
            };
            let class = RegClass::rc_from_u32(class_num as u32);
            let mut class_str = "class ".to_string()
                + &class.long_name().to_string()
                + &"(".to_string()
                + &class.short_name().to_string()
                + &") at ".to_string();
            class_str = class_str + &format!("[{} .. {}]: ", class_info.first, class_info.last);
            for ix in class_info.first..=class_info.last {
                class_str = class_str + &self.regs[ix].1;
                // Mark the suggested scratch register with "*".
                if let Some(suggested_ix) = class_info.suggested_scratch {
                    if ix == suggested_ix {
                        class_str = class_str + "*";
                    }
                }
                class_str = class_str + " ";
            }
            res.push(class_str);
        }
        // Then the non-allocatable tail, if present.
        if self.allocable < self.regs.len() {
            let mut stragglers = format!(
                "not allocable at [{} .. {}]: ",
                self.allocable,
                self.regs.len() - 1
            );
            for ix in self.allocable..self.regs.len() {
                stragglers = stragglers + &self.regs[ix].1 + &" ".to_string();
            }
            res.push(stragglers);
        }
        res
    }
    /// Panics unless the universe is structurally sane: at most 256 regs,
    /// the allocable count within bounds, every register's index matching
    /// its position, each class range correctly typed and covering exactly
    /// the allocatable prefix, and any suggested scratch inside its range.
    pub fn check_is_sane(&self) {
        let regs_len = self.regs.len();
        let regs_allocable = self.allocable;
        // Real-register indices are 8 bits, hence the 256 limit.
        let mut ok = regs_len <= 256;
        if ok {
            ok = regs_allocable <= regs_len;
        }
        // Every entry must be a real register whose index equals its position.
        if ok {
            for i in 0..regs_len {
                let (reg, _name) = &self.regs[i];
                if ok && (reg.to_reg().is_virtual() || reg.get_index() != i) {
                    ok = false;
                }
            }
        }
        if ok {
            // Record which classes actually occur in the allocatable prefix.
            let mut regclass_used = [false; NUM_REG_CLASSES];
            for rc in 0..NUM_REG_CLASSES {
                regclass_used[rc] = false;
            }
            for i in 0..regs_allocable {
                let (reg, _name) = &self.regs[i];
                let rc = reg.get_class().rc_to_u32() as usize;
                regclass_used[rc] = true;
            }
            // Cross-check against allocable_by_class: presence must agree,
            // ranges must be homogeneous, scratch must lie in range.
            let mut regs_visited = 0;
            for rc in 0..NUM_REG_CLASSES {
                match &self.allocable_by_class[rc] {
                    &None => {
                        if regclass_used[rc] {
                            ok = false;
                        }
                    }
                    &Some(RegClassInfo {
                        first,
                        last,
                        suggested_scratch,
                    }) => {
                        if !regclass_used[rc] {
                            ok = false;
                        }
                        if ok {
                            for i in first..last + 1 {
                                let (reg, _name) = &self.regs[i];
                                if ok && RegClass::rc_from_u32(rc as u32) != reg.get_class() {
                                    ok = false;
                                }
                                regs_visited += 1;
                            }
                        }
                        if ok {
                            if let Some(s) = suggested_scratch {
                                if s < first || s > last {
                                    ok = false;
                                }
                            }
                        }
                    }
                }
            }
            // The class ranges together must cover the allocatable prefix
            // exactly once.
            if ok && regs_visited != regs_allocable {
                ok = false;
            }
        }
        if !ok {
            panic!("RealRegUniverse::check_is_sane: invalid RealRegUniverse");
        }
    }
}
/// A position within one instruction at which a live range can begin or end.
/// Declaration order defines the within-instruction ordering:
/// Reload < Use < Def < Spill.
///
/// Fix: `PartialOrd` is now derived alongside `Ord` instead of being
/// hand-written (clippy's `derive_ord_xor_partial_ord`); for a fieldless
/// enum the derived ordering follows the discriminants, identical to the
/// previous manual `(*self as u32)` comparison.
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum Point {
    Reload = 0,
    Use = 1,
    Def = 2,
    Spill = 3,
}
impl Point {
    #[inline(always)]
    pub fn is_reload(self) -> bool {
        match self {
            Point::Reload => true,
            _ => false,
        }
    }
    #[inline(always)]
    pub fn is_use(self) -> bool {
        match self {
            Point::Use => true,
            _ => false,
        }
    }
    #[inline(always)]
    pub fn is_def(self) -> bool {
        match self {
            Point::Def => true,
            _ => false,
        }
    }
    #[inline(always)]
    pub fn is_spill(self) -> bool {
        match self {
            Point::Spill => true,
            _ => false,
        }
    }
    #[inline(always)]
    pub fn is_use_or_def(self) -> bool {
        self.is_use() || self.is_def()
    }
}
/// An `InstIx` and a `Point` packed into one u32: bits 31..2 hold the
/// instruction index (which must be < 2^30), bits 1..0 hold the `Point`.
/// This packing makes the derived ordering equal to lexicographic
/// (instruction, point) order.
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct InstPoint {
    bits: u32,
}
impl InstPoint {
    /// Packs `iix` and `pt`; asserts the index fits in 30 bits.
    #[inline(always)]
    pub fn new(iix: InstIx, pt: Point) -> Self {
        let iix_n = iix.get();
        assert!(iix_n < 0x4000_0000u32);
        let pt_n = pt as u32;
        InstPoint {
            bits: (iix_n << 2) | pt_n,
        }
    }
    /// The instruction index (bits 31..2).
    #[inline(always)]
    pub fn iix(self) -> InstIx {
        InstIx::new(self.bits >> 2)
    }
    /// The point within the instruction (bits 1..0).
    #[inline(always)]
    pub fn pt(self) -> Point {
        match self.bits & 3 {
            0 => Point::Reload,
            1 => Point::Use,
            2 => Point::Def,
            3 => Point::Spill,
            // All four 2-bit values are covered above.
            _ => panic!("InstPt::pt: unreachable case"),
        }
    }
    /// Replaces the instruction index, keeping the point bits.
    #[inline(always)]
    pub fn set_iix(&mut self, iix: InstIx) {
        let iix_n = iix.get();
        assert!(iix_n < 0x4000_0000u32);
        self.bits = (iix_n << 2) | (self.bits & 3);
    }
    /// Replaces the point, keeping the instruction-index bits.
    #[inline(always)]
    pub fn set_pt(&mut self, pt: Point) {
        self.bits = (self.bits & 0xFFFF_FFFCu32) | pt as u32;
    }
    #[inline(always)]
    pub fn new_reload(iix: InstIx) -> Self {
        InstPoint::new(iix, Point::Reload)
    }
    #[inline(always)]
    pub fn new_use(iix: InstIx) -> Self {
        InstPoint::new(iix, Point::Use)
    }
    #[inline(always)]
    pub fn new_def(iix: InstIx) -> Self {
        InstPoint::new(iix, Point::Def)
    }
    #[inline(always)]
    pub fn new_spill(iix: InstIx) -> Self {
        InstPoint::new(iix, Point::Spill)
    }
    /// The invalid sentinel.
    /// NOTE(review): this shares the 0xFFFF_FFFF encoding with `max_value`,
    /// so an invalid point is indistinguishable from the maximum — confirm
    /// callers never need to tell them apart.
    #[inline(always)]
    pub fn invalid_value() -> Self {
        Self {
            bits: 0xFFFF_FFFFu32,
        }
    }
    /// The greatest possible value (used as a +infinity bound).
    #[inline(always)]
    pub fn max_value() -> Self {
        Self {
            bits: 0xFFFF_FFFFu32,
        }
    }
    /// The least possible value (instruction 0, Reload point).
    #[inline(always)]
    pub fn min_value() -> Self {
        Self { bits: 0u32 }
    }
}
impl fmt::Debug for InstPoint {
    /// Prints as the instruction index plus a point suffix, e.g. "i7.u".
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let suffix = match self.pt() {
            Point::Reload => ".r",
            Point::Use => ".u",
            Point::Def => ".d",
            Point::Spill => ".s",
        };
        write!(fmt, "{:?}{}", self.iix(), suffix)
    }
}
/// A contiguous span of liveness within a single block: the inclusive
/// interval `[first, last]` of `InstPoint`s.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct RangeFrag {
    pub first: InstPoint,
    pub last: InstPoint,
}
impl fmt::Debug for RangeFrag {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "(RF: {:?}-{:?})", self.first, self.last)
    }
}
impl RangeFrag {
    /// Builds a fragment; debug-asserts a non-empty, well-ordered interval.
    #[allow(dead_code)] pub fn new(first: InstPoint, last: InstPoint) -> Self {
        debug_assert!(first <= last);
        RangeFrag { first, last }
    }
    /// A fragment with both endpoints set to the invalid sentinel.
    pub fn invalid_value() -> Self {
        Self {
            first: InstPoint::invalid_value(),
            last: InstPoint::invalid_value(),
        }
    }
    /// Builds a fragment for block `bix` together with its metrics.
    ///
    /// `count` is the associated use/def count and must be 1 when the
    /// fragment is a single point. The kind is derived from whether the
    /// fragment starts at the block's first Use point and/or ends at its
    /// last Def point:
    ///   neither -> Local, end only -> LiveOut, start only -> LiveIn,
    ///   both -> Thru.
    pub fn new_with_metrics<F: Function>(
        f: &F,
        bix: BlockIx,
        first: InstPoint,
        last: InstPoint,
        count: u16,
    ) -> (Self, RangeFragMetrics) {
        debug_assert!(f.block_insns(bix).len() >= 1);
        debug_assert!(f.block_insns(bix).contains(first.iix()));
        debug_assert!(f.block_insns(bix).contains(last.iix()));
        debug_assert!(first <= last);
        if first == last {
            debug_assert!(count == 1);
        }
        let first_iix_in_block = f.block_insns(bix).first();
        let last_iix_in_block = f.block_insns(bix).last();
        let first_pt_in_block = InstPoint::new_use(first_iix_in_block);
        let last_pt_in_block = InstPoint::new_def(last_iix_in_block);
        let kind = match (first == first_pt_in_block, last == last_pt_in_block) {
            (false, false) => RangeFragKind::Local,
            (false, true) => RangeFragKind::LiveOut,
            (true, false) => RangeFragKind::LiveIn,
            (true, true) => RangeFragKind::Thru,
        };
        (
            RangeFrag { first, last },
            RangeFragMetrics { bix, kind, count },
        )
    }
}
/// Partially orders two fragments: `Less`/`Greater` when they are strictly
/// disjoint, `Equal` when they coincide exactly, and `None` when they
/// overlap without being identical.
pub fn cmp_range_frags(f1: &RangeFrag, f2: &RangeFrag) -> Option<Ordering> {
    if f1.last < f2.first {
        Some(Ordering::Less)
    } else if f1.first > f2.last {
        Some(Ordering::Greater)
    } else if f1.first == f2.first && f1.last == f2.last {
        Some(Ordering::Equal)
    } else {
        None
    }
}
impl RangeFrag {
    /// True when `ipt` lies within this fragment (both endpoints inclusive).
    pub fn contains(&self, ipt: &InstPoint) -> bool {
        self.first <= *ipt && *ipt <= self.last
    }
}
/// How a fragment relates to its block's boundaries.
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum RangeFragKind {
    /// Starts and ends strictly inside the block.
    Local,
    /// Live on entry to the block.
    LiveIn,
    /// Live on exit from the block.
    LiveOut,
    /// Live on both entry and exit.
    Thru,
}
impl fmt::Debug for RangeFragKind {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            RangeFragKind::Local => "Local",
            RangeFragKind::LiveIn => "LiveIn",
            RangeFragKind::LiveOut => "LiveOut",
            RangeFragKind::Thru => "Thru",
        };
        write!(fmt, "{}", name)
    }
}
/// Metrics describing a fragment: its block, boundary kind and use count.
#[derive(Clone, PartialEq)]
pub struct RangeFragMetrics {
    pub bix: BlockIx,
    pub kind: RangeFragKind,
    pub count: u16,
}
impl fmt::Debug for RangeFragMetrics {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(
            fmt,
            "(RFM: {:?}, count={}, {:?})",
            self.kind, self.count, self.bix
        )
    }
}
/// A list of fragment indices kept sorted and pairwise disjoint, per
/// `cmp_range_frags` (the environment `fenv` resolves indices to fragments).
#[derive(Clone)]
pub struct SortedRangeFragIxs {
    pub frag_ixs: SmallVec<[RangeFragIx; 4]>,
}
impl fmt::Debug for SortedRangeFragIxs {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        self.frag_ixs.fmt(fmt)
    }
}
impl SortedRangeFragIxs {
    /// Panics unless consecutive fragments are strictly increasing (and
    /// hence disjoint) under `cmp_range_frags`.
    pub(crate) fn check(&self, fenv: &TypedIxVec<RangeFragIx, RangeFrag>) {
        for i in 1..self.frag_ixs.len() {
            let prev_frag = &fenv[self.frag_ixs[i - 1]];
            let this_frag = &fenv[self.frag_ixs[i]];
            if cmp_range_frags(prev_frag, this_frag) != Some(Ordering::Less) {
                panic!("SortedRangeFragIxs::check: vector not ok");
            }
        }
    }
    /// Sorts the indices by fragment order; panics if any two fragments
    /// overlap (the partial order returns Equal/None for those).
    pub fn sort(&mut self, fenv: &TypedIxVec<RangeFragIx, RangeFrag>) {
        self.frag_ixs.sort_unstable_by(|fix_a, fix_b| {
            match cmp_range_frags(&fenv[*fix_a], &fenv[*fix_b]) {
                Some(Ordering::Less) => Ordering::Less,
                Some(Ordering::Greater) => Ordering::Greater,
                Some(Ordering::Equal) | None => {
                    panic!("SortedRangeFragIxs::sort: overlapping Frags!")
                }
            }
        });
    }
    /// Builds from arbitrary indices: sorts, then validates.
    pub fn new(
        frag_ixs: SmallVec<[RangeFragIx; 4]>,
        fenv: &TypedIxVec<RangeFragIx, RangeFrag>,
    ) -> Self {
        let mut res = SortedRangeFragIxs { frag_ixs };
        res.sort(fenv);
        res.check(fenv);
        res
    }
    /// Builds a singleton list (trivially sorted).
    pub fn unit(fix: RangeFragIx, fenv: &TypedIxVec<RangeFragIx, RangeFrag>) -> Self {
        let mut res = SortedRangeFragIxs {
            frag_ixs: SmallVec::<[RangeFragIx; 4]>::new(),
        };
        res.frag_ixs.push(fix);
        res.check(fenv);
        res
    }
    /// True when `pt` falls inside any fragment. Binary search is valid
    /// because the fragments are sorted and disjoint.
    pub fn contains_pt(&self, fenv: &TypedIxVec<RangeFragIx, RangeFrag>, pt: InstPoint) -> bool {
        self.frag_ixs
            .binary_search_by(|&ix| {
                let frag = &fenv[ix];
                if pt < frag.first {
                    Ordering::Greater
                } else if pt >= frag.first && pt <= frag.last {
                    Ordering::Equal
                } else {
                    Ordering::Less
                }
            })
            .is_ok()
    }
}
/// A list of fragments (by value, unlike `SortedRangeFragIxs`) kept sorted
/// and pairwise disjoint per `cmp_range_frags`.
#[derive(Clone)]
pub struct SortedRangeFrags {
    pub frags: SmallVec<[RangeFrag; 4]>,
}
impl fmt::Debug for SortedRangeFrags {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        self.frags.fmt(fmt)
    }
}
impl SortedRangeFrags {
    /// A singleton list (trivially sorted).
    pub fn unit(frag: RangeFrag) -> Self {
        let mut res = SortedRangeFrags {
            frags: SmallVec::<[RangeFrag; 4]>::new(),
        };
        res.frags.push(frag);
        res
    }
    pub fn empty() -> Self {
        Self {
            frags: SmallVec::<[RangeFrag; 4]>::new(),
        }
    }
    /// True when any fragment of `self` overlaps any fragment of `other`.
    /// Two-pointer merge walk over the two sorted, disjoint lists:
    /// advance whichever side is strictly behind; any Equal/None comparison
    /// means an overlap.
    pub fn overlaps(&self, other: &Self) -> bool {
        let frags1 = &self.frags;
        let frags2 = &other.frags;
        let n1 = frags1.len();
        let n2 = frags2.len();
        let mut c1 = 0;
        let mut c2 = 0;
        loop {
            // One list exhausted: no overlap found.
            if c1 >= n1 || c2 >= n2 {
                return false;
            }
            let f1 = &frags1[c1];
            let f2 = &frags2[c2];
            match cmp_range_frags(f1, f2) {
                Some(Ordering::Less) => c1 += 1,
                Some(Ordering::Greater) => c2 += 1,
                // Equal or incomparable (partial overlap): ranges intersect.
                _ => {
                    return true;
                }
            }
        }
    }
    /// True when `pt` falls inside any fragment. Binary search is valid
    /// because the fragments are sorted and disjoint.
    pub fn contains_pt(&self, pt: InstPoint) -> bool {
        self.frags
            .binary_search_by(|frag| {
                if pt < frag.first {
                    Ordering::Greater
                } else if pt >= frag.first && pt <= frag.last {
                    Ordering::Equal
                } else {
                    Ordering::Less
                }
            })
            .is_ok()
    }
}
/// Estimated cost of spilling a live range: either a finite non-negative
/// value or infinite (meaning the range must not be spilled).
#[derive(Copy, Clone)]
pub enum SpillCost {
    Infinite,
    Finite(f32),
}
impl fmt::Debug for SpillCost {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            SpillCost::Infinite => write!(fmt, "INFINITY"),
            SpillCost::Finite(c) => write!(fmt, "{:<.3}", c),
        }
    }
}
impl SpillCost {
    #[inline(always)]
    pub fn zero() -> Self {
        SpillCost::Finite(0.0)
    }
    #[inline(always)]
    pub fn infinite() -> Self {
        SpillCost::Infinite
    }
    /// Wraps a finite cost; asserts it is a non-negative, non-NaN,
    /// non-subnormal (other than zero) value below 1e18.
    #[inline(always)]
    pub fn finite(cost: f32) -> Self {
        assert!(cost.is_normal() || cost == 0.0);
        assert!(cost >= 0.0);
        assert!(cost < 1e18);
        SpillCost::Finite(cost)
    }
    #[inline(always)]
    pub fn is_zero(&self) -> bool {
        match self {
            SpillCost::Finite(c) => *c == 0.0,
            SpillCost::Infinite => false,
        }
    }
    #[inline(always)]
    pub fn is_infinite(&self) -> bool {
        match self {
            SpillCost::Infinite => true,
            SpillCost::Finite(_) => false,
        }
    }
    #[inline(always)]
    pub fn is_finite(&self) -> bool {
        !self.is_infinite()
    }
    /// Strict "cheaper than": finite values compare numerically, any finite
    /// beats infinite, and two infinities are not ordered.
    #[inline(always)]
    pub fn is_less_than(&self, other: &Self) -> bool {
        match (self, other) {
            (SpillCost::Finite(c1), SpillCost::Finite(c2)) => c1 < c2,
            (SpillCost::Finite(_), SpillCost::Infinite) => true,
            _ => false,
        }
    }
    /// Saturating in-place addition: anything plus infinite is infinite.
    #[inline(always)]
    pub fn add(&mut self, other: &Self) {
        *self = match (*self, *other) {
            (SpillCost::Finite(c1), SpillCost::Finite(c2)) => SpillCost::Finite(c1 + c2),
            _ => SpillCost::Infinite,
        };
    }
}
/// A live range attached to a real register: the register plus its sorted,
/// disjoint fragment indices, and whether it carries a reference (for
/// reference-type tracking).
#[derive(Clone)]
pub struct RealRange {
    pub rreg: RealReg,
    pub sorted_frags: SortedRangeFragIxs,
    pub is_ref: bool,
}
impl fmt::Debug for RealRange {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(
            fmt,
            "(RR: {:?}{}, {:?})",
            self.rreg,
            if self.is_ref { " REF" } else { "" },
            self.sorted_frags
        )
    }
}
impl RealRange {
    /// Like the `Debug` output, but uses the universe's name for the
    /// register instead of its raw index form.
    pub fn show_with_rru(&self, univ: &RealRegUniverse) -> String {
        format!(
            "(RR: {}{}, {:?})",
            self.rreg.to_reg().show_with_rru(univ),
            if self.is_ref { " REF" } else { "" },
            self.sorted_frags
        )
    }
}
/// A live range for a virtual register: the vreg, the real register assigned
/// so far (if any), the sorted disjoint fragments it spans, reference-type
/// flag, and the size/cost metrics the allocator weighs.
#[derive(Clone)]
pub struct VirtualRange {
    pub vreg: VirtualReg,
    /// Assigned real register, once allocation has chosen one.
    pub rreg: Option<RealReg>,
    pub sorted_frags: SortedRangeFrags,
    pub is_ref: bool,
    pub size: u16,
    pub total_cost: u32,
    pub spill_cost: SpillCost,
}
impl VirtualRange {
    /// True when the two ranges' fragment lists intersect anywhere.
    pub fn overlaps(&self, other: &Self) -> bool {
        self.sorted_frags.overlaps(&other.sorted_frags)
    }
}
impl fmt::Debug for VirtualRange {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(
            fmt,
            "(VR: {:?}{},",
            self.vreg,
            if self.is_ref { " REF" } else { "" }
        )?;
        // Idiom fix: `if let` instead of `is_some()` followed by `unwrap()`.
        if let Some(rreg) = self.rreg {
            write!(fmt, " -> {:?}", rreg)?;
        }
        write!(
            fmt,
            " sz={}, tc={}, sc={:?}, {:?})",
            self.size, self.total_cost, self.spill_cost, self.sorted_frags
        )
    }
}
/// Reverse maps from registers to the live ranges that mention them, plus
/// lists of registers whose fragment counts exceed `many_frags_thresh`.
pub struct RegToRangesMaps {
    /// For each real register index, the `RealRange`s involving it.
    pub rreg_to_rlrs_map: Vec<SmallVec<[RealRangeIx; 6]>>,
    /// For each virtual register index, the `VirtualRange`s involving it.
    pub vreg_to_vlrs_map: Vec<SmallVec<[VirtualRangeIx; 3]>>,
    pub rregs_with_many_frags: Vec<u32>,
    pub vregs_with_many_frags: Vec<u32>,
    /// Threshold used to populate the two `*_with_many_frags` lists.
    pub many_frags_thresh: usize,
}
/// One move instruction `dst <- src`, with its estimated execution frequency.
pub struct MoveInfoElem {
    pub dst: Reg,
    pub src: Reg,
    pub iix: InstIx,
    pub est_freq: u32,
}
/// All moves in a function, for move-coalescing purposes.
pub struct MoveInfo {
    pub moves: Vec<MoveInfoElem>,
}
/// Either a `RealRangeIx` or a `VirtualRangeIx`, packed into a u32:
/// bit 31 set = real, clear = virtual; the low 31 bits hold the index.
/// The all-ones pattern is the invalid sentinel.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct RangeId {
    bits: u32,
}
impl RangeId {
    /// Tags a real-range index (sets bit 31); the index must fit in 31 bits.
    #[inline(always)]
    pub fn new_real(rlrix: RealRangeIx) -> Self {
        let n = rlrix.get();
        assert!(n <= 0x7FFF_FFFF);
        Self {
            bits: n | 0x8000_0000,
        }
    }
    /// Tags a virtual-range index (bit 31 clear); must fit in 31 bits.
    #[inline(always)]
    pub fn new_virtual(vlrix: VirtualRangeIx) -> Self {
        let n = vlrix.get();
        assert!(n <= 0x7FFF_FFFF);
        Self { bits: n }
    }
    #[inline(always)]
    pub fn is_real(self) -> bool {
        self.bits & 0x8000_0000 != 0
    }
    #[allow(dead_code)]
    #[inline(always)]
    pub fn is_virtual(self) -> bool {
        self.bits & 0x8000_0000 == 0
    }
    /// Unpacks as a real-range index; asserts the real tag is present.
    #[inline(always)]
    pub fn to_real(self) -> RealRangeIx {
        assert!(self.bits & 0x8000_0000 != 0);
        RealRangeIx::new(self.bits & 0x7FFF_FFFF)
    }
    /// Unpacks as a virtual-range index; asserts the tag bit is clear.
    #[inline(always)]
    pub fn to_virtual(self) -> VirtualRangeIx {
        assert!(self.bits & 0x8000_0000 == 0);
        VirtualRangeIx::new(self.bits)
    }
    /// Invalid sentinel (all ones; classified as "real" by `is_real`).
    #[inline(always)]
    pub fn invalid_value() -> Self {
        Self { bits: 0xFFFF_FFFF }
    }
}
impl fmt::Debug for RangeId {
    // Delegates to the underlying typed index's Debug ("rrN" or "vrN").
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        if self.is_real() {
            self.to_real().fmt(fmt)
        } else {
            self.to_virtual().fmt(fmt)
        }
    }
}