use crate::{
asm_generation::fuel::{analyses::liveness_analysis, compiler_constants},
asm_lang::{
allocated_ops::AllocatedRegister, virtual_register::*, AllocatedAbstractOp, Op,
VirtualImmediate12, VirtualImmediate18, VirtualImmediate24, VirtualOp,
},
};
use either::Either;
use indexmap::IndexMap;
use petgraph::{
stable_graph::NodeIndex,
visit::EdgeRef,
Direction::{Incoming, Outgoing},
};
use rustc_hash::{FxHashMap, FxHashSet};
use std::cmp::Ordering;
use std::collections::{hash_map, BTreeSet, HashMap};
use sway_error::error::CompileError;
use sway_ir::size_bytes_round_up_to_word_alignment;
use sway_types::{FxIndexSet, Span};
/// Interference graph used by the register allocator: nodes are virtual
/// registers; an edge means the two registers are live at the same time and
/// must not share a hardware register. The `bool` edge weight marks whether
/// the edge is still "active" during colouring (edges are deactivated rather
/// than removed so node indices stay stable).
pub type InterferenceGraph =
    petgraph::stable_graph::StableGraph<VirtualRegister, bool, petgraph::Directed>;
/// One allocatable hardware register and the set of virtual registers that
/// have been mapped onto it.
#[derive(Debug)]
struct RegisterAllocationStatus {
    // The physical (allocated) register this entry represents.
    reg: AllocatedRegister,
    // Virtual registers currently assigned to `reg`. A `BTreeSet` keeps
    // iteration and comparison deterministic.
    used_by: BTreeSet<VirtualRegister>,
}
/// The pool of allocatable registers, with the virtual registers each one
/// currently serves. Built by `assign_registers` and consumed when rewriting
/// ops into their allocated form.
#[derive(Debug)]
pub(crate) struct RegisterPool {
    registers: Vec<RegisterAllocationStatus>,
}
impl RegisterPool {
    /// Build a pool containing every allocatable register
    /// (`0..NUM_ALLOCATABLE_REGISTERS`), each starting with no users.
    fn init() -> Self {
        let mut registers =
            Vec::with_capacity(compiler_constants::NUM_ALLOCATABLE_REGISTERS as usize);
        for index in 0..compiler_constants::NUM_ALLOCATABLE_REGISTERS {
            registers.push(RegisterAllocationStatus {
                reg: AllocatedRegister::Allocated(index),
                used_by: BTreeSet::new(),
            });
        }
        Self { registers }
    }

    /// Look up the allocated register that `virtual_register` was mapped to,
    /// if any.
    pub(crate) fn get_register(
        &self,
        virtual_register: &VirtualRegister,
    ) -> Option<AllocatedRegister> {
        self.registers.iter().find_map(|status| {
            status
                .used_by
                .contains(virtual_register)
                .then(|| status.reg.clone())
        })
    }
}
/// Build the interference graph for `ops` from the per-instruction `live_out`
/// sets.
///
/// A node is created for every *virtual* register appearing in `ops`. For
/// each instruction, an edge is drawn from every register it defines to every
/// register live after it — except that for `MOVE v, c` no edge is drawn
/// between `v` and the move source `c`, which is what later allows the two
/// registers of a move to be coalesced.
///
/// Returns the graph together with a map from each virtual register to its
/// node index.
pub(crate) fn create_interference_graph(
    ops: &[Op],
    live_out: &[BTreeSet<VirtualRegister>],
) -> (InterferenceGraph, HashMap<VirtualRegister, NodeIndex>) {
    let mut interference_graph = InterferenceGraph::with_capacity(0, 0);

    // Collect all virtual registers used anywhere and add one node each.
    // (Fix: the closure parameter pattern was corrupted to `®` — a mangled
    // `&reg` — which does not compile.)
    let mut reg_to_node_map: HashMap<VirtualRegister, NodeIndex> = HashMap::new();
    ops.iter()
        .fold(BTreeSet::new(), |mut tree, elem| {
            let mut regs = elem.registers();
            regs.retain(|&reg| reg.is_virtual());
            tree.extend(regs);
            tree
        })
        .iter()
        .for_each(|&reg| {
            reg_to_node_map.insert(reg.clone(), interference_graph.add_node(reg.clone()));
        });

    for (ix, regs) in live_out.iter().enumerate() {
        match &ops[ix].opcode {
            // A move's destination does not interfere with its source.
            Either::Left(VirtualOp::MOVE(v, c)) => {
                if let Some(ix1) = reg_to_node_map.get(v) {
                    for b in regs.iter() {
                        if let Some(ix2) = reg_to_node_map.get(b) {
                            if *b != *c && *b != *v {
                                interference_graph.update_edge(*ix1, *ix2, true);
                            }
                        }
                    }
                }
            }
            // Everything else: each defined register interferes with every
            // register live after the instruction (other than itself).
            _ => {
                for v in &ops[ix].def_registers() {
                    if let Some(ix1) = reg_to_node_map.get(v) {
                        for b in regs.iter() {
                            if let Some(ix2) = reg_to_node_map.get(b) {
                                if *b != **v {
                                    interference_graph.update_edge(*ix1, *ix2, true);
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    (interference_graph, reg_to_node_map)
}
/// Remove `MOVE` instructions between two virtual registers by merging
/// ("coalescing") the pair in the interference graph, making the move
/// redundant.
///
/// A pair is only merged when it is conservatively safe to do so, using both
/// the Briggs and the George tests; unsafe or interfering pairs keep their
/// move. The surviving ops and `live_out` sets are rewritten to use the
/// representative register of each merged pair; `interference_graph` and
/// `reg_to_node_map` are updated in place.
///
/// Returns the reduced op list and the matching reduced live-out sets.
pub(crate) fn coalesce_registers(
    ops: &[Op],
    live_out: Vec<BTreeSet<VirtualRegister>>,
    interference_graph: &mut InterferenceGraph,
    reg_to_node_map: &mut HashMap<VirtualRegister, NodeIndex>,
) -> (Vec<Op>, Vec<BTreeSet<VirtualRegister>>) {
    // Maps a merged-away register to the one it was merged into. Chains can
    // form across successive merges, so lookups follow the map to a fixed
    // point.
    let mut reg_to_reg_map = IndexMap::<&VirtualRegister, &VirtualRegister>::new();

    // Ops and live-out sets with coalesced moves dropped.
    let mut reduced_ops: Vec<Op> = Vec::with_capacity(ops.len());
    let mut reduced_live_out: Vec<BTreeSet<VirtualRegister>> = Vec::with_capacity(live_out.len());
    assert!(ops.len() == live_out.len());

    for (op_idx, op) in ops.iter().enumerate() {
        match &op.opcode {
            Either::Left(VirtualOp::MOVE(x, y)) => {
                match (x, y) {
                    // Only moves between two virtual registers are candidates.
                    (VirtualRegister::Virtual(_), VirtualRegister::Virtual(_)) => {
                        // Resolve both operands through earlier merges.
                        let mut r1 = x;
                        while let Some(t) = reg_to_reg_map.get(r1) {
                            r1 = t;
                        }
                        let mut r2 = y;
                        while let Some(t) = reg_to_reg_map.get(r2) {
                            r2 = t;
                        }
                        let ix1 = reg_to_node_map.get(r1).unwrap();
                        let ix2 = reg_to_node_map.get(r2).unwrap();

                        // Both operands already resolve to the same register:
                        // the move is a no-op and is simply dropped.
                        if r1 == r2 {
                            continue;
                        }

                        let r1_neighbours = interference_graph
                            .neighbors_undirected(*ix1)
                            .collect::<FxIndexSet<_>>();
                        let r2_neighbours = interference_graph
                            .neighbors_undirected(*ix2)
                            .collect::<FxIndexSet<_>>();

                        // Briggs: the merged node would have fewer than K
                        // neighbours of significant (>= K) degree.
                        let briggs_safety = r1_neighbours
                            .union(&r2_neighbours)
                            .filter(|&&neighbour| {
                                interference_graph.neighbors_undirected(neighbour).count()
                                    >= compiler_constants::NUM_ALLOCATABLE_REGISTERS as usize
                            })
                            .count()
                            < compiler_constants::NUM_ALLOCATABLE_REGISTERS as usize;

                        // George: every neighbour of r2 either already
                        // interferes with r1 or has insignificant degree.
                        let george_safety = r2_neighbours.iter().all(|&r2_neighbor| {
                            r1_neighbours.contains(&r2_neighbor)
                                || interference_graph.neighbors_undirected(r2_neighbor).count()
                                    < compiler_constants::NUM_ALLOCATABLE_REGISTERS as usize
                        });

                        let safe = briggs_safety || george_safety;

                        // Interfering or unsafe pairs cannot be merged: keep
                        // the move and continue.
                        if interference_graph.contains_edge(*ix1, *ix2)
                            || interference_graph.contains_edge(*ix2, *ix1)
                            || !safe
                        {
                            reduced_ops.push(op.clone());
                            reduced_live_out.push(live_out[op_idx].clone());
                            continue;
                        }

                        // Merge r1 into r2: move r1's edges over, drop r1's
                        // node, and record the mapping. The move itself is
                        // not pushed, i.e. it is deleted.
                        for neighbor in r1_neighbours {
                            if !interference_graph.contains_edge(*ix2, neighbor) {
                                interference_graph.update_edge(neighbor, *ix2, true);
                            }
                        }
                        interference_graph.remove_node(*ix1);
                        reg_to_node_map.insert(r1.clone(), *ix2);
                        reg_to_reg_map.insert(r1, r2);
                    }
                    // Moves involving constant registers are kept as is.
                    _ => {
                        reduced_ops.push(op.clone());
                        reduced_live_out.push(live_out[op_idx].clone());
                    }
                }
            }
            // Non-move instructions are kept as is.
            _ => {
                reduced_ops.push(op.clone());
                reduced_live_out.push(live_out[op_idx].clone());
            }
        }
    }

    // Flatten mapping chains so each merged register points directly at its
    // final representative.
    let mut final_reg_to_reg_map = IndexMap::<&VirtualRegister, &VirtualRegister>::new();
    for reg in reg_to_reg_map.keys() {
        let mut temp = reg;
        while let Some(t) = reg_to_reg_map.get(temp) {
            temp = t;
        }
        final_reg_to_reg_map.insert(reg, temp);
    }

    // Rewrite the surviving ops and live-out sets with the final mapping.
    for new_op in &mut reduced_ops {
        *new_op = new_op.update_register(&final_reg_to_reg_map);
    }
    for new_live_out in &mut reduced_live_out {
        for (old, &new) in &final_reg_to_reg_map {
            if new_live_out.remove(old) {
                new_live_out.insert(new.clone());
            }
        }
    }

    (reduced_ops, reduced_live_out)
}
/// For every virtual register in `ops`, collect the instruction indices at
/// which it is defined and those at which it is used.
///
/// Returns a map from register to `(def_points, use_points)`, each a list of
/// indices into `ops` in increasing order.
fn compute_def_use_points(ops: &[Op]) -> FxHashMap<VirtualRegister, (Vec<usize>, Vec<usize>)> {
    let mut res: FxHashMap<VirtualRegister, (Vec<usize>, Vec<usize>)> = FxHashMap::default();
    for (idx, op) in ops.iter().enumerate() {
        // Only virtual registers are tracked; constant registers are never
        // allocation or spill candidates. (Fix: the closure parameter
        // pattern was corrupted to `®` — a mangled `&reg` — which does not
        // compile. The subsequent loops also re-filtered on `is_virtual()`,
        // which is redundant after these `retain` calls and has been
        // removed.)
        let mut op_use = op.use_registers();
        let mut op_def = op.def_registers();
        op_use.retain(|&reg| reg.is_virtual());
        op_def.retain(|&reg| reg.is_virtual());

        // Record this instruction as a use point of each used register.
        for &vreg in &op_use {
            match res.entry(vreg.clone()) {
                hash_map::Entry::Occupied(mut occ) => {
                    occ.get_mut().1.push(idx);
                }
                hash_map::Entry::Vacant(vac) => {
                    vac.insert((vec![], vec![idx]));
                }
            }
        }
        // Record this instruction as a def point of each defined register.
        for &vreg in &op_def {
            match res.entry(vreg.clone()) {
                hash_map::Entry::Occupied(mut occ) => {
                    occ.get_mut().0.push(idx);
                }
                hash_map::Entry::Vacant(vac) => {
                    vac.insert((vec![idx], vec![]));
                }
            }
        }
    }
    res
}
/// Order the interference graph's nodes for colouring, Chaitin–Briggs style.
///
/// Nodes with active degree below the number of allocatable registers (K) are
/// trivially colourable and are pushed onto the result stack; a stacked
/// node's edges are deactivated (weight set to `false`) so the remaining
/// degrees shrink. When no low-degree node remains, a pending node is picked
/// for spilling and only the edges needed to keep its def/use live ranges
/// interfering are retained, after which simplification resumes.
///
/// Returns `Ok(stack)` (popped later by `assign_registers`) when no spills
/// were needed, otherwise `Err` with the set of registers to spill.
pub(crate) fn color_interference_graph(
    interference_graph: &mut InterferenceGraph,
    ops: &[Op],
    live_out: &[BTreeSet<VirtualRegister>],
) -> Result<Vec<NodeIndex>, FxHashSet<VirtualRegister>> {
    let mut stack = Vec::with_capacity(interference_graph.node_count());
    let mut on_stack = FxHashSet::default();
    let mut spills = FxHashSet::default();
    let def_use_points = compute_def_use_points(ops);

    // Partition nodes: degree < K goes straight onto the worklist (can be
    // simplified now); the rest wait in `pending`.
    let mut worklist = vec![];
    let mut pending = FxHashSet::default();
    for node in interference_graph.node_indices() {
        let num_neighbors = interference_graph.neighbors_undirected(node).count();
        if num_neighbors < compiler_constants::NUM_ALLOCATABLE_REGISTERS as usize {
            worklist.push(node);
        } else {
            pending.insert(node);
        }
    }

    // Neighbours reachable over still-active (weight == true) outgoing edges.
    fn get_connected_outgoing_neighbors(
        interference_graph: &InterferenceGraph,
        node_index: NodeIndex,
    ) -> impl Iterator<Item = NodeIndex> + '_ {
        interference_graph
            .edges_directed(node_index, Outgoing)
            .filter_map(|e| interference_graph[e.id()].then_some(e.target()))
    }

    // Neighbours reachable over still-active incoming edges.
    fn get_connected_incoming_neighbors(
        interference_graph: &InterferenceGraph,
        node_index: NodeIndex,
    ) -> impl Iterator<Item = NodeIndex> + '_ {
        interference_graph
            .edges_directed(node_index, Incoming)
            .filter_map(|e| interference_graph[e.id()].then_some(e.source()))
    }

    // All still-active neighbours, in either direction.
    fn get_connected_neighbours(
        interference_graph: &InterferenceGraph,
        node_index: NodeIndex,
    ) -> impl Iterator<Item = NodeIndex> + '_ {
        get_connected_outgoing_neighbors(interference_graph, node_index).chain(
            get_connected_incoming_neighbors(interference_graph, node_index),
        )
    }

    // Deactivate (set weight to false) every edge of `node_index` whose
    // endpoint registers satisfy `should_delete`. Edges are flagged rather
    // than removed so the graph keeps its full shape for `assign_registers`.
    fn delete_edges<P: Fn(&VirtualRegister, &VirtualRegister) -> bool>(
        interference_graph: &mut InterferenceGraph,
        node_index: NodeIndex,
        should_delete: P,
    ) {
        let edges: Vec<_> = interference_graph
            .edges_directed(node_index, Outgoing)
            .chain(interference_graph.edges_directed(node_index, Incoming))
            .map(|edge| edge.id())
            .collect();
        for e in edges {
            let (source, target) = interference_graph.edge_endpoints(e).unwrap();
            {
                if should_delete(&interference_graph[source], &interference_graph[target]) {
                    interference_graph[e] = false;
                }
            }
        }
    }

    loop {
        // Simplify: stack every node that is (or has become) low-degree.
        while let Some(node_index) = worklist.pop() {
            if on_stack.contains(&node_index) {
                continue;
            }
            stack.push(node_index);
            on_stack.insert(node_index);
            // A spilled node keeps the edges chosen at spill time; any other
            // node's edges are all deactivated now that it is stacked.
            if !spills.contains(&interference_graph[node_index]) {
                delete_edges(interference_graph, node_index, |_, _| true)
            }
            // Pending neighbours whose active degree has dropped below K
            // become simplifiable too.
            let candidate_neighbors: Vec<_> = interference_graph
                .neighbors_undirected(node_index)
                .filter(|n| {
                    pending.contains(n)
                        && get_connected_neighbours(interference_graph, *n).count()
                            < compiler_constants::NUM_ALLOCATABLE_REGISTERS as usize
                })
                .collect();
            for candidate_neighbor in &candidate_neighbors {
                pending.remove(candidate_neighbor);
                worklist.push(*candidate_neighbor);
            }
        }

        // No low-degree node left: choose a spill candidate among the
        // pending nodes. Highest active in-degree wins; ties break on the
        // register and then the node index to keep the choice deterministic.
        if let Some(spill_reg_index) = pending.iter().copied().max_by(|node1, node2| {
            let node1_priority =
                get_connected_incoming_neighbors(interference_graph, *node1).count();
            let node2_priority =
                get_connected_incoming_neighbors(interference_graph, *node2).count();
            match node1_priority.cmp(&node2_priority) {
                Ordering::Equal => {
                    let reg_cmp = interference_graph[*node1].cmp(&interference_graph[*node2]);
                    if reg_cmp == Ordering::Equal {
                        node1.index().cmp(&node2.index())
                    } else {
                        reg_cmp
                    }
                }
                other => other,
            }
        }) {
            let spill_reg = interference_graph[spill_reg_index].clone();
            spills.insert(spill_reg.clone());

            // Registers that must keep interfering with the spilled one:
            // those live at its definition points and just before its use
            // points, excluding registers already being spilled.
            let to_retain =
                def_use_points
                    .get(&spill_reg)
                    .map_or(FxHashSet::default(), |(defs, uses)| {
                        let mut retains = FxHashSet::default();
                        for &def in defs {
                            retains
                                .extend(live_out[def].iter().filter(|reg| !spills.contains(*reg)));
                        }
                        for &r#use in uses.iter().filter(|&&r#use| r#use > 0) {
                            retains.extend(
                                live_out[r#use - 1]
                                    .iter()
                                    .filter(|reg| !spills.contains(*reg)),
                            );
                        }
                        retains
                    });

            // Deactivate every other edge of the spilled node and feed it
            // back into simplification.
            delete_edges(interference_graph, spill_reg_index, |source, target| {
                !(to_retain.contains(source) || to_retain.contains(target))
            });
            pending.remove(&spill_reg_index);
            worklist.push(spill_reg_index);
        } else {
            break;
        }
    }

    if spills.is_empty() {
        Ok(stack)
    } else {
        Err(spills)
    }
}
/// Allocate a hardware register for every virtual register in `ops`.
///
/// Runs the full pipeline — liveness analysis, interference-graph
/// construction, move coalescing and graph colouring — and, whenever
/// colouring fails, spills the offending registers to stack slots and retries
/// on the rewritten ops. After too many retries an internal compiler error is
/// reported.
pub(crate) fn allocate_registers(ops: &[Op]) -> Result<Vec<AllocatedAbstractOp>, CompileError> {
    // Outcome of one colouring attempt.
    enum ColouringResult {
        Success {
            updated_ops: Vec<Op>,
            interference_graph: InterferenceGraph,
            colouring_stack: Vec<NodeIndex>,
        },
        SpillsNeeded {
            updated_ops: Vec<Op>,
            spills: FxHashSet<VirtualRegister>,
        },
    }

    // One pass: liveness -> interference graph -> coalescing -> colouring.
    fn try_color(ops: &[Op]) -> ColouringResult {
        let live_out = liveness_analysis(ops, true);
        let (mut interference_graph, mut reg_to_node_ix) =
            create_interference_graph(ops, &live_out);
        let (updated_ops, live_out) =
            coalesce_registers(ops, live_out, &mut interference_graph, &mut reg_to_node_ix);
        match color_interference_graph(&mut interference_graph, &updated_ops, &live_out) {
            Ok(colouring_stack) => ColouringResult::Success {
                updated_ops,
                interference_graph,
                colouring_stack,
            },
            Err(spills) => ColouringResult::SpillsNeeded {
                updated_ops,
                spills,
            },
        }
    }

    // `updated_ops_ref` points at the current op list: the caller's `ops` on
    // the first attempt, then the spilled copy held in `updated_ops`.
    let mut updated_ops_ref = ops;
    let mut updated_ops;
    let mut try_count = 0;
    let (updated_ops, interference_graph, mut stack) = loop {
        match try_color(updated_ops_ref) {
            ColouringResult::Success {
                updated_ops,
                interference_graph,
                colouring_stack,
            } => {
                break (updated_ops, interference_graph, colouring_stack);
            }
            ColouringResult::SpillsNeeded {
                updated_ops: updated_ops_before_spill,
                spills,
            } => {
                // Give up after a few rounds of spilling; name the failing
                // function via the comment on its first label op, if any.
                if try_count >= 4 {
                    let comment = updated_ops_before_spill
                        .iter()
                        .find_map(|op| {
                            if let Either::Right(crate::asm_lang::ControlFlowOp::Label(_)) =
                                op.opcode
                            {
                                Some(op.comment.clone())
                            } else {
                                None
                            }
                        })
                        .unwrap_or("unknown".into());
                    return Err(CompileError::InternalOwned(
                        format!(
                            "The allocator cannot resolve a register mapping for function {comment}. \
                            Using #[inline(never)] on some functions may help."
                        ),
                        Span::dummy(),
                    ));
                }
                try_count += 1;
                // Insert spill/refill code and retry on the rewritten ops.
                updated_ops = spill(&updated_ops_before_spill, &spills);
                updated_ops_ref = &updated_ops;
            }
        }
    };

    // Map the colouring stack onto concrete registers, then rewrite every op
    // into its allocated form.
    let pool = assign_registers(&interference_graph, &mut stack)?;
    let mut buf = vec![];
    for op in &updated_ops {
        buf.push(AllocatedAbstractOp {
            opcode: op.allocate_registers(&pool),
            comment: op.comment.clone(),
            owning_span: op.owning_span.clone(),
        })
    }
    Ok(buf)
}
/// Pop nodes off the colouring `stack` and map each virtual register onto the
/// first pool register none of whose current users interfere with it.
///
/// Errors when some register cannot be placed, i.e. the colouring produced by
/// `color_interference_graph` does not fit the pool.
fn assign_registers(
    interference_graph: &InterferenceGraph,
    stack: &mut Vec<NodeIndex>,
) -> Result<RegisterPool, CompileError> {
    let mut pool = RegisterPool::init();
    while let Some(node) = stack.pop() {
        let reg = interference_graph[node].clone();
        // Registers interfering with `reg`, gathered by name.
        let neighbors: BTreeSet<VirtualRegister> = interference_graph
            .neighbors_undirected(node)
            .map(|neighbor| interference_graph[neighbor].clone())
            .collect();
        // Constant registers are pre-assigned; only virtual ones need a slot.
        if !reg.is_virtual() {
            continue;
        }
        let slot = pool
            .registers
            .iter_mut()
            .find(|status| status.used_by.is_disjoint(&neighbors));
        match slot {
            Some(status) => {
                status.used_by.insert(reg.clone());
            }
            None => {
                return Err(CompileError::Internal(
                    "The allocator cannot resolve a register mapping for this program. \
                    Using #[inline(never)] on some functions may help.",
                    Span::dummy(),
                ));
            }
        }
    }
    Ok(pool)
}
/// Rewrite `ops` so that every register in `spills` lives in a stack slot:
/// each use is preceded by a load ("refill") from its slot and each def is
/// followed by a store ("spill") to it. The stack-frame setup/teardown ops
/// (`CFEI` and, if present, `CFSI`) are enlarged to make room for the slots.
fn spill(ops: &[Op], spills: &FxHashSet<VirtualRegister>) -> Vec<Op> {
    let mut spilled: Vec<Op> = vec![];

    // Locate the single stack-extension (CFEI) and optional stack-shrink
    // (CFSI) instructions whose immediates must grow.
    let mut cfe_idx_opt = None;
    let mut cfs_idx_opt = None;
    for (op_idx, op) in ops.iter().enumerate() {
        match &op.opcode {
            Either::Left(VirtualOp::CFEI(..)) => {
                assert!(cfe_idx_opt.is_none(), "Found more than one stack extension");
                cfe_idx_opt = Some(op_idx);
            }
            Either::Left(VirtualOp::CFSI(..)) => {
                assert!(cfs_idx_opt.is_none(), "Found more than one stack shrink");
                cfs_idx_opt = Some(op_idx);
            }
            _ => (),
        }
    }
    let cfe_idx = cfe_idx_opt.expect("Function does not have CFEI instruction for locals");

    // Read the current locals size out of the CFEI immediate.
    let Either::Left(VirtualOp::CFEI(
        VirtualRegister::Constant(ConstantRegister::StackPointer),
        virt_imm_24,
    )) = &ops[cfe_idx].opcode
    else {
        panic!("Unexpected opcode");
    };
    let locals_size_bytes = virt_imm_24.value();

    // Word-align the end of the locals area and lay out one 8-byte slot per
    // spilled register just past it.
    let locals_size_bytes = size_bytes_round_up_to_word_alignment!(locals_size_bytes);
    let spill_offsets_bytes = spill_offsets(spills, locals_size_bytes);

    let spills_size = (8 * spills.len()) as u32;
    let new_locals_byte_size = locals_size_bytes + spills_size;
    if new_locals_byte_size > compiler_constants::TWENTY_FOUR_BITS as u32 {
        panic!("Enormous stack usage for locals.");
    }

    for (op_idx, op) in ops.iter().enumerate() {
        if op_idx == cfe_idx {
            // Replace CFEI with one whose immediate also covers the spill
            // slots.
            spilled.push(Op {
                opcode: Either::Left(VirtualOp::CFEI(
                    VirtualRegister::Constant(ConstantRegister::StackPointer),
                    VirtualImmediate24::new(new_locals_byte_size.into()),
                )),
                comment: op.comment.clone() + &format!(", register spills {spills_size} byte(s)"),
                owning_span: op.owning_span.clone(),
            });
        } else if matches!(cfs_idx_opt, Some(cfs_idx) if cfs_idx == op_idx) {
            // Likewise for CFSI, so the shrink matches the extension.
            spilled.push(Op {
                opcode: Either::Left(VirtualOp::CFSI(
                    VirtualRegister::Constant(ConstantRegister::StackPointer),
                    VirtualImmediate24::new(new_locals_byte_size.into()),
                )),
                comment: op.comment.clone() + &format!(", register spills {spills_size} byte(s)"),
                owning_span: op.owning_span.clone(),
            });
        } else {
            let use_registers = op.use_registers();
            let def_registers = op.def_registers();

            // When a slot's offset does not fit LW/SW's 12-bit word
            // immediate, emit code computing a base address into the scratch
            // register and return it with the residual word immediate to use.
            fn calculate_offset_reg_wordimm(
                inst_list: &mut Vec<Op>,
                offset_bytes: u32,
            ) -> (VirtualRegister, VirtualImmediate12) {
                assert!(offset_bytes.is_multiple_of(8));
                if offset_bytes <= compiler_constants::EIGHTEEN_BITS as u32 {
                    // Offset fits MOVI's 18-bit immediate:
                    // scratch = LocalsBase + offset, residual immediate 0.
                    let offset_mov_instr = Op {
                        opcode: Either::Left(VirtualOp::MOVI(
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualImmediate18::new(offset_bytes.into()),
                        )),
                        comment: "[spill/refill]: set offset".to_string(),
                        owning_span: None,
                    };
                    inst_list.push(offset_mov_instr);
                    let offset_add_instr = Op {
                        opcode: Either::Left(VirtualOp::ADD(
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualRegister::Constant(ConstantRegister::LocalsBase),
                        )),
                        comment: "[spill/refill]: add offset to stack base".to_string(),
                        owning_span: None,
                    };
                    inst_list.push(offset_add_instr);
                    (
                        VirtualRegister::Constant(ConstantRegister::Scratch),
                        VirtualImmediate12::new(0),
                    )
                } else {
                    // Split the offset into two 12-bit halves:
                    // scratch = (upper << 12) + LocalsBase, and the lower
                    // half (in words) becomes the residual immediate.
                    assert!(offset_bytes <= compiler_constants::TWENTY_FOUR_BITS as u32);
                    let offset_upper_12 = offset_bytes >> 12;
                    let offset_lower_12 = offset_bytes & 0b111111111111;
                    assert!((offset_upper_12 << 12) + offset_lower_12 == offset_bytes);
                    let offset_upper_mov_instr = Op {
                        opcode: Either::Left(VirtualOp::MOVI(
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualImmediate18::new(offset_upper_12.into()),
                        )),
                        comment: "[spill/refill]: compute offset".to_string(),
                        owning_span: None,
                    };
                    inst_list.push(offset_upper_mov_instr);
                    let offset_upper_shift_instr = Op {
                        opcode: Either::Left(VirtualOp::SLLI(
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualImmediate12::new(12),
                        )),
                        comment: "[spill/refill]: compute offset".to_string(),
                        owning_span: None,
                    };
                    inst_list.push(offset_upper_shift_instr);
                    let offset_add_instr = Op {
                        opcode: Either::Left(VirtualOp::ADD(
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualRegister::Constant(ConstantRegister::Scratch),
                            VirtualRegister::Constant(ConstantRegister::LocalsBase),
                        )),
                        comment: "[spill/refill]: compute offset".to_string(),
                        owning_span: None,
                    };
                    inst_list.push(offset_add_instr);
                    (
                        VirtualRegister::Constant(ConstantRegister::Scratch),
                        VirtualImmediate12::new((offset_lower_12 / 8).into()),
                    )
                }
            }

            // Refill every spilled register this op uses, before the op.
            for &spilled_use in use_registers.iter().filter(|r#use| spills.contains(r#use)) {
                let offset_bytes = spill_offsets_bytes[spilled_use];
                assert!(offset_bytes.is_multiple_of(8));
                if offset_bytes / 8 <= compiler_constants::TWELVE_BITS as u32 {
                    // Word offset fits the 12-bit immediate: load directly
                    // off LocalsBase.
                    spilled.push(Op {
                        opcode: Either::Left(VirtualOp::LW(
                            spilled_use.clone(),
                            VirtualRegister::Constant(ConstantRegister::LocalsBase),
                            VirtualImmediate12::new((offset_bytes / 8).into()),
                        )),
                        comment: "[spill/refill]: refill from spill".to_string(),
                        owning_span: None,
                    });
                } else {
                    let (offset_reg, offset_imm_word) =
                        calculate_offset_reg_wordimm(&mut spilled, offset_bytes);
                    let lw = Op {
                        opcode: Either::Left(VirtualOp::LW(
                            spilled_use.clone(),
                            offset_reg,
                            offset_imm_word,
                        )),
                        comment: "[spill/refill]: refill from spill".to_string(),
                        owning_span: None,
                    };
                    spilled.push(lw);
                }
            }

            // The original instruction itself.
            spilled.push(op.clone());

            // Spill every spilled register this op defines, after the op.
            for &spilled_def in def_registers.iter().filter(|def| spills.contains(def)) {
                let offset_bytes = spill_offsets_bytes[spilled_def];
                assert!(offset_bytes.is_multiple_of(8));
                if offset_bytes / 8 <= compiler_constants::TWELVE_BITS as u32 {
                    // Word offset fits the 12-bit immediate: store directly
                    // off LocalsBase.
                    spilled.push(Op {
                        opcode: Either::Left(VirtualOp::SW(
                            VirtualRegister::Constant(ConstantRegister::LocalsBase),
                            spilled_def.clone(),
                            VirtualImmediate12::new((offset_bytes / 8).into()),
                        )),
                        comment: "[spill/refill]: spill".to_string(),
                        owning_span: None,
                    });
                } else {
                    let (offset_reg, offset_imm_word) =
                        calculate_offset_reg_wordimm(&mut spilled, offset_bytes);
                    let sw = Op {
                        opcode: Either::Left(VirtualOp::SW(
                            offset_reg,
                            spilled_def.clone(),
                            offset_imm_word,
                        )),
                        comment: "[spill/refill]: spill".to_string(),
                        owning_span: None,
                    };
                    spilled.push(sw);
                }
            }
        }
    }
    spilled
}
fn spill_offsets(
spills: &FxHashSet<VirtualRegister>,
locals_size_bytes: u32,
) -> FxHashMap<VirtualRegister, u32> {
let mut spill_regs: Vec<_> = spills.iter().collect();
spill_regs.sort();
spill_regs
.into_iter()
.enumerate()
.map(|(i, reg)| (reg.clone(), (i * 8) as u32 + locals_size_bytes))
.collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    use rustc_hash::FxHashSet;

    // Shorthand for building a named virtual register.
    fn make_reg(name: &str) -> VirtualRegister {
        VirtualRegister::Virtual(name.to_owned())
    }

    // `spill_offsets` must not depend on hash-set iteration order: the same
    // registers inserted in different orders yield identical layouts, with
    // slots assigned in sorted register order, 8 bytes apart, starting right
    // after the locals area.
    #[test]
    fn spill_offsets_are_deterministic() {
        let locals_size_bytes = 24u32;
        let mut set_a = FxHashSet::default();
        set_a.insert(make_reg("r1"));
        set_a.insert(make_reg("r2"));
        set_a.insert(make_reg("r3"));
        // Same registers, different insertion order.
        let mut set_b = FxHashSet::default();
        set_b.insert(make_reg("r3"));
        set_b.insert(make_reg("r1"));
        set_b.insert(make_reg("r2"));
        let offsets_a = spill_offsets(&set_a, locals_size_bytes);
        let offsets_b = spill_offsets(&set_b, locals_size_bytes);
        assert_eq!(offsets_a, offsets_b);
        // Check the exact layout: sorted order, 8-byte stride, based at
        // `locals_size_bytes`.
        let mut sorted: Vec<_> = offsets_a.into_iter().collect();
        sorted.sort_by(|(reg_l, _), (reg_r, _)| reg_l.cmp(reg_r));
        assert_eq!(
            sorted,
            vec![
                (make_reg("r1"), locals_size_bytes),
                (make_reg("r2"), locals_size_bytes + 8),
                (make_reg("r3"), locals_size_bytes + 16),
            ]
        );
        assert_eq!(offsets_b.len(), 3);
    }
}