#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
use log::debug;
use std::cmp::Ordering;
use crate::avl_tree::{AVLTree, AVL_NULL};
use crate::data_structures::{
cmp_range_frags, RangeFrag, RangeFragIx, SortedRangeFragIxs, SpillSlot, TypedIxVec,
VirtualRange, VirtualRangeIx,
};
use crate::union_find::UnionFindEquivClasses;
use crate::Function;
/// The state of one spill-slot word in `SpillSlotAllocator::slots`.
enum LogicalSpillSlot {
    /// The base word of an allocated slot: `size` is the slot's width in
    /// words, and `tree` is an AVL tree of the RangeFrags currently
    /// occupying the slot, ordered by fragment range.
    InUse {
        size: u32,
        tree: AVLTree<RangeFragIx>,
    },
    /// A word that cannot serve as a slot base: either alignment padding,
    /// or one of the trailing words of a multi-word `InUse` slot.
    Unavail,
}
impl LogicalSpillSlot {
    /// Returns true if this word is an `Unavail` padding/trailing word.
    fn is_Unavail(&self) -> bool {
        // `matches!` replaces the manual `match … => true, _ => false`.
        matches!(self, LogicalSpillSlot::Unavail)
    }

    /// Returns true if this word is the base of an allocated slot.
    /// Defined as the negation of `is_Unavail` so the two can never
    /// disagree if a variant is added later.
    fn is_InUse(&self) -> bool {
        !self.is_Unavail()
    }

    /// Borrows the occupancy tree of an `InUse` slot.
    ///
    /// Panics if called on an `Unavail` word — callers must check
    /// `is_InUse` first.
    fn get_tree(&self) -> &AVLTree<RangeFragIx> {
        match self {
            LogicalSpillSlot::InUse { ref tree, .. } => tree,
            LogicalSpillSlot::Unavail => panic!("LogicalSpillSlot::get_tree"),
        }
    }

    /// Mutably borrows the occupancy tree of an `InUse` slot.
    ///
    /// Panics if called on an `Unavail` word.
    fn get_mut_tree(&mut self) -> &mut AVLTree<RangeFragIx> {
        match self {
            LogicalSpillSlot::InUse { ref mut tree, .. } => tree,
            LogicalSpillSlot::Unavail => panic!("LogicalSpillSlot::get_mut_tree"),
        }
    }

    /// Returns the slot's size in words.
    ///
    /// Panics if called on an `Unavail` word.
    fn get_size(&self) -> u32 {
        match self {
            LogicalSpillSlot::InUse { size, .. } => *size,
            LogicalSpillSlot::Unavail => panic!("LogicalSpillSlot::get_size"),
        }
    }
}
/// Ordering callback for the per-slot AVL trees: two tree entries
/// (fragment indices) compare exactly as their underlying fragments do.
fn cmp_tree_entries_for_SpillSlotAllocator(
    fix1: RangeFragIx,
    fix2: RangeFragIx,
    frag_env: &TypedIxVec<RangeFragIx, RangeFrag>,
) -> Option<Ordering> {
    let frag1 = &frag_env[fix1];
    let frag2 = &frag_env[fix2];
    cmp_range_frags(frag1, frag2)
}
/// Reports whether every fragment in `frags` could be inserted into `tree`
/// without overlapping any fragment already stored there.
///
/// The tree is ordered by fragment range, so a plain BST descent locates
/// any overlapping resident fragment: if the candidate lies strictly
/// before or after the node's fragment we descend; otherwise they overlap.
fn ssal_is_add_possible(
    tree: &AVLTree<RangeFragIx>,
    frag_env: &TypedIxVec<RangeFragIx, RangeFrag>,
    frags: &SortedRangeFragIxs,
) -> bool {
    frags.frag_ixs.iter().all(|fix| {
        let cand = &frag_env[*fix];
        let mut cursor = tree.root;
        loop {
            if cursor == AVL_NULL {
                // Fell off the tree: no resident fragment overlaps `cand`.
                break true;
            }
            let node = &tree.pool[cursor as usize];
            let resident = &frag_env[node.item];
            if cand.last < resident.first {
                cursor = node.left;
            } else if resident.last < cand.first {
                cursor = node.right;
            } else {
                // Ranges intersect — insertion is impossible.
                break false;
            }
        }
    })
}
/// Inserts all fragments of `frags` into `tree`, but only if the entire
/// group fits without overlap.  Returns true on success.  Insertion is
/// all-or-nothing: on failure the tree is left untouched.
fn ssal_add_if_possible(
    tree: &mut AVLTree<RangeFragIx>,
    frag_env: &TypedIxVec<RangeFragIx, RangeFrag>,
    frags: &SortedRangeFragIxs,
) -> bool {
    // Pre-check the whole group; this guarantees every insert below succeeds.
    let fits = ssal_is_add_possible(tree, frag_env, frags);
    if fits {
        for &fix in frags.frag_ixs.iter() {
            let inserted = tree.insert(
                fix,
                Some(&|fix1, fix2| {
                    cmp_tree_entries_for_SpillSlotAllocator(fix1, fix2, frag_env)
                }),
            );
            // Cannot fail: the pre-check above proved there is room.
            assert!(inserted);
        }
    }
    fits
}
/// Allocates and tracks spill slots for virtual ranges that could not be
/// assigned a real register.
pub struct SpillSlotAllocator {
    // One entry per spill-slot word, indexed by slot number.  A slot of
    // size N occupies one `InUse` entry followed by N-1 `Unavail` entries.
    slots: Vec<LogicalSpillSlot>,
}
impl SpillSlotAllocator {
    /// Creates an allocator with no slots yet.
    pub fn new() -> Self {
        Self { slots: vec![] }
    }

    /// Total number of spill-slot words handed out so far.  NOTE(review):
    /// this is the raw vector length, so it includes `Unavail` alignment
    /// padding words as well as `InUse` base words.
    pub fn num_slots_in_use(&self) -> usize {
        self.slots.len()
    }

    /// Allocates a fresh, empty slot of `req_size` words and returns its
    /// slot number.  The slot base is aligned to a multiple of `req_size`
    /// by inserting `Unavail` padding first, and the `req_size - 1` words
    /// following the base are marked `Unavail` so nothing else can claim
    /// them.
    fn add_new_slot(&mut self, req_size: u32) -> u32 {
        // Only these power-of-two sizes are supported.
        assert!(req_size == 1 || req_size == 2 || req_size == 4 || req_size == 8);
        // Pad so the new slot starts at a req_size-aligned index.
        while self.slots.len() % (req_size as usize) != 0 {
            self.slots.push(LogicalSpillSlot::Unavail);
        }
        // The AVL tree wants a sentinel "default" value for empty nodes.
        let dflt = RangeFragIx::invalid_value();
        let tree = AVLTree::<RangeFragIx>::new(dflt);
        let res = self.slots.len() as u32;
        self.slots.push(LogicalSpillSlot::InUse {
            size: req_size,
            tree,
        });
        // Reserve the trailing words of a multi-word slot.
        for _ in 1..req_size {
            self.slots.push(LogicalSpillSlot::Unavail);
        }
        assert!(self.slots.len() % (req_size as usize) == 0);
        res
    }

    /// Assigns spill slots to `vlrix` and every other VirtualRange in its
    /// equivalence class, recording each choice in `vlr_slot_env`.
    ///
    /// Two-pass strategy:
    /// * Pass 1 scans existing slots of the required size for one into
    ///   which every class member fits *individually*; if none is found, a
    ///   fresh slot is created.
    /// * Pass 2 performs the real insertions into the chosen slot.  Since
    ///   pass 1 tested members independently, two members of the class may
    ///   still collide with each other; when an insertion fails, pass 2
    ///   falls back to any other same-sized slot, and as a last resort
    ///   allocates a brand-new (empty) slot, where insertion must succeed.
    ///
    /// Panics if any class member already has a slot, or if the final
    /// insertion into a fresh slot somehow fails.
    pub fn alloc_spill_slots<F: Function>(
        &mut self,
        vlr_slot_env: &mut TypedIxVec<VirtualRangeIx, Option<SpillSlot>>,
        func: &F,
        frag_env: &TypedIxVec<RangeFragIx, RangeFrag>,
        vlr_env: &TypedIxVec<VirtualRangeIx, VirtualRange>,
        vlrEquivClasses: &UnionFindEquivClasses<VirtualRangeIx>,
        vlrix: VirtualRangeIx,
    ) {
        // Precondition: no member of the class has been assigned yet.
        for cand_vlrix in vlrEquivClasses.equiv_class_elems_iter(vlrix) {
            assert!(vlr_slot_env[cand_vlrix].is_none());
        }
        // The slot size is dictated by the spilled vreg's register class.
        let vlrix_vreg = vlr_env[vlrix].vreg;
        let req_size = func.get_spillslot_size(vlrix_vreg.get_class(), vlrix_vreg);
        assert!(req_size == 1 || req_size == 2 || req_size == 4 || req_size == 8);

        // Pass 1: find one existing slot that every class member fits into
        // when considered on its own.
        let mut mb_chosen_slotno: Option<u32> = None;
        'pass1_search_existing_slots: for cand_slot_no in 0..self.slots.len() as u32 {
            let cand_slot = &self.slots[cand_slot_no as usize];
            // Skip padding words and slots of the wrong size.
            if !cand_slot.is_InUse() {
                continue 'pass1_search_existing_slots;
            }
            if cand_slot.get_size() != req_size {
                continue 'pass1_search_existing_slots;
            }
            // NOTE(review): `&cand_slot.get_tree()` is `&&AVLTree`; the
            // extra reference is harmless (auto-deref at the call below).
            let tree = &cand_slot.get_tree();
            assert!(mb_chosen_slotno.is_none());
            // Test each class member against this slot independently.
            let mut all_cands_fit_individually = true;
            for cand_vlrix in vlrEquivClasses.equiv_class_elems_iter(vlrix) {
                let cand_vlr = &vlr_env[cand_vlrix];
                let this_cand_fits = ssal_is_add_possible(&tree, &frag_env, &cand_vlr.sorted_frags);
                if !this_cand_fits {
                    all_cands_fit_individually = false;
                    break;
                }
            }
            if !all_cands_fit_individually {
                continue 'pass1_search_existing_slots;
            }
            // First suitable slot wins.
            mb_chosen_slotno = Some(cand_slot_no);
            break;
        }
        // No suitable existing slot: make a fresh one.
        let chosen_slotno: u32 = if mb_chosen_slotno.is_none() {
            self.add_new_slot(req_size)
        } else {
            mb_chosen_slotno.unwrap()
        };

        // Tracks (for no current purpose beyond debugging) whether every
        // member actually landed in the chosen slot.
        let mut _all_in_chosen = true;
        // Pass 2: insert each class member for real.
        'pass2_per_equiv_class: for cand_vlrix in vlrEquivClasses.equiv_class_elems_iter(vlrix) {
            let cand_vlr = &vlr_env[cand_vlrix];
            // Try the slot chosen in pass 1 first.
            // NOTE(review): `tree` is `&mut AVLTree`, so `&mut tree` below
            // is `&mut &mut`; this works via auto-reborrow.
            let mut tree = self.slots[chosen_slotno as usize].get_mut_tree();
            let added = ssal_add_if_possible(&mut tree, &frag_env, &cand_vlr.sorted_frags);
            if added {
                vlr_slot_env[cand_vlrix] = Some(SpillSlot::new(chosen_slotno));
                continue 'pass2_per_equiv_class;
            }
            // Fallback 1: any other existing slot of the right size.
            for alt_slotno in 0..self.slots.len() as u32 {
                let alt_slot = &self.slots[alt_slotno as usize];
                if !alt_slot.is_InUse() {
                    continue;
                }
                if alt_slot.get_size() != req_size {
                    continue;
                }
                if alt_slotno == chosen_slotno {
                    // Already tried above.
                    continue;
                }
                let mut tree = self.slots[alt_slotno as usize].get_mut_tree();
                let added = ssal_add_if_possible(&mut tree, &frag_env, &cand_vlr.sorted_frags);
                if added {
                    vlr_slot_env[cand_vlrix] = Some(SpillSlot::new(alt_slotno));
                    continue 'pass2_per_equiv_class;
                }
            }
            _all_in_chosen = false;
            // Fallback 2: a brand-new slot.  It is empty, so insertion
            // cannot fail.
            let new_slotno = self.add_new_slot(req_size);
            let mut tree = self.slots[new_slotno as usize].get_mut_tree();
            let added = ssal_add_if_possible(&mut tree, &frag_env, &cand_vlr.sorted_frags);
            if added {
                vlr_slot_env[cand_vlrix] = Some(SpillSlot::new(new_slotno));
                continue 'pass2_per_equiv_class;
            }
            panic!("SpillSlotAllocator: alloc_spill_slots: failed?!?!");
        }

        // Postcondition: every class member now has a slot.
        for eclass_vlrix in vlrEquivClasses.equiv_class_elems_iter(vlrix) {
            assert!(vlr_slot_env[eclass_vlrix].is_some());
            debug!(
                "-- alloc_spill_sls {:?} -> {:?}",
                eclass_vlrix,
                vlr_slot_env[eclass_vlrix].unwrap()
            );
        }
    }
}