Skip to main content

cranelift_codegen/regalloc/
spilling.rs

//! Spilling pass.
//!
//! The spilling pass is the first to run after the liveness analysis. Its primary function is to
//! ensure that the register pressure never exceeds the number of available registers by moving
//! some SSA values to spill slots on the stack. This is encoded in the affinity of the value's
//! live range.
//!
//! Some instruction operand constraints may require additional registers to resolve. Since this
//! can cause spilling, the spilling pass is also responsible for resolving those constraints by
//! inserting copies. The extra constraints are:
//!
//! 1. A value used by a tied operand must be killed by the instruction. This is resolved by
//!    inserting a copy to a temporary value when necessary.
//! 2. When the same value is used more than once by an instruction, the operand constraints must
//!    be compatible. Otherwise, the value must be copied into a new register for some of the
//!    operands.

18use crate::cursor::{Cursor, EncCursor};
19use crate::dominator_tree::DominatorTree;
20use crate::ir::{ArgumentLoc, Ebb, Function, Inst, InstBuilder, SigRef, Value, ValueLoc};
21use crate::isa::registers::{RegClass, RegClassIndex, RegClassMask, RegUnit};
22use crate::isa::{ConstraintKind, EncInfo, RecipeConstraints, RegInfo, TargetIsa};
23use crate::regalloc::affinity::Affinity;
24use crate::regalloc::live_value_tracker::{LiveValue, LiveValueTracker};
25use crate::regalloc::liveness::Liveness;
26use crate::regalloc::pressure::Pressure;
27use crate::regalloc::virtregs::VirtRegs;
28use crate::timing;
29use crate::topo_order::TopoOrder;
30use alloc::vec::Vec;
31use core::fmt;
32use log::debug;
33
34/// Return a top-level register class which contains `unit`.
35fn toprc_containing_regunit(unit: RegUnit, reginfo: &RegInfo) -> RegClass {
36    let bank = reginfo.bank_containing_regunit(unit).unwrap();
37    reginfo.classes[bank.first_toprc..(bank.first_toprc + bank.num_toprcs)]
38        .iter()
39        .find(|&rc| rc.contains(unit))
40        .expect("reg unit should be in a toprc")
41}
42
/// Persistent data structures for the spilling pass.
///
/// The scratch vectors live here (rather than in the per-pass `Context`) so their allocations
/// can be reused across `run` invocations; `clear` empties them without freeing.
pub struct Spilling {
    // Values spilled for the current instruction, pending `process_spills` (see `Context::spills`).
    spills: Vec<Value>,
    // Noteworthy register uses of the current instruction (see `Context::reg_uses`).
    reg_uses: Vec<RegUse>,
}
48
/// Context data structure that gets instantiated once per pass.
struct Context<'a> {
    // Current instruction as well as reference to function and ISA.
    cur: EncCursor<'a>,

    // Cached ISA information.
    reginfo: RegInfo,
    encinfo: EncInfo,

    // References to contextual data structures we need.
    domtree: &'a DominatorTree,
    liveness: &'a mut Liveness,
    virtregs: &'a VirtRegs,
    topo: &'a mut TopoOrder,

    // Current register pressure.
    pressure: Pressure,

    // Values spilled for the current instruction. These values have already been removed from the
    // pressure tracker, but they are still present in the live value tracker and their affinity
    // hasn't been changed yet.
    // Borrowed from `Spilling::spills` so the allocation persists across passes.
    spills: &'a mut Vec<Value>,

    // Uses of register values in the current instruction.
    // Borrowed from `Spilling::reg_uses`; filled and drained per instruction.
    reg_uses: &'a mut Vec<RegUse>,
}
75
76impl Spilling {
77    /// Create a new spilling data structure.
78    pub fn new() -> Self {
79        Self {
80            spills: Vec::new(),
81            reg_uses: Vec::new(),
82        }
83    }
84
85    /// Clear all data structures in this spilling pass.
86    pub fn clear(&mut self) {
87        self.spills.clear();
88        self.reg_uses.clear();
89    }
90
91    /// Run the spilling algorithm over `func`.
92    pub fn run(
93        &mut self,
94        isa: &dyn TargetIsa,
95        func: &mut Function,
96        domtree: &DominatorTree,
97        liveness: &mut Liveness,
98        virtregs: &VirtRegs,
99        topo: &mut TopoOrder,
100        tracker: &mut LiveValueTracker,
101    ) {
102        let _tt = timing::ra_spilling();
103        debug!("Spilling for:\n{}", func.display(isa));
104        let reginfo = isa.register_info();
105        let usable_regs = isa.allocatable_registers(func);
106        let mut ctx = Context {
107            cur: EncCursor::new(func, isa),
108            reginfo: isa.register_info(),
109            encinfo: isa.encoding_info(),
110            domtree,
111            liveness,
112            virtregs,
113            topo,
114            pressure: Pressure::new(&reginfo, &usable_regs),
115            spills: &mut self.spills,
116            reg_uses: &mut self.reg_uses,
117        };
118        ctx.run(tracker)
119    }
120}
121
impl<'a> Context<'a> {
    /// Run the spilling pass over the whole function.
    ///
    /// EBBs are visited in a topological order consistent with the dominator tree (via
    /// `TopoOrder`), so a value's spill decision is made before any dominated use is seen.
    fn run(&mut self, tracker: &mut LiveValueTracker) {
        self.topo.reset(self.cur.func.layout.ebbs());
        while let Some(ebb) = self.topo.next(&self.cur.func.layout, self.domtree) {
            self.visit_ebb(ebb, tracker);
        }
    }

    /// Process one EBB: first its header (live-ins and parameters), then each instruction.
    fn visit_ebb(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
        debug!("Spilling {}:", ebb);
        self.cur.goto_top(ebb);
        self.visit_ebb_header(ebb, tracker);
        tracker.drop_dead_params();
        self.process_spills(tracker);

        while let Some(inst) = self.cur.next_inst() {
            if !self.cur.func.dfg[inst].opcode().is_ghost() {
                self.visit_inst(inst, ebb, tracker);
            } else {
                // Ghost instructions have no encoding and impose no register constraints;
                // just update the tracker and release any registers killed here.
                let (_throughs, kills) = tracker.process_ghost(inst);
                self.free_regs(kills);
            }
            tracker.drop_dead(inst);
            self.process_spills(tracker);
        }
    }

    // Take all live registers in `regs` from the pressure set.
    // This doesn't cause any spilling, it is assumed there are enough registers.
    // Dead values are skipped; they don't hold a register past this point.
    fn take_live_regs(&mut self, regs: &[LiveValue]) {
        for lv in regs {
            if !lv.is_dead {
                if let Affinity::Reg(rci) = lv.affinity {
                    let rc = self.reginfo.rc(rci);
                    self.pressure.take(rc);
                }
            }
        }
    }

    // Free all registers in `kills` from the pressure set.
    // Values queued in `self.spills` were already freed by `spill_reg`, so skip them to avoid
    // freeing the same register twice.
    fn free_regs(&mut self, kills: &[LiveValue]) {
        for lv in kills {
            if let Affinity::Reg(rci) = lv.affinity {
                if !self.spills.contains(&lv.value) {
                    let rc = self.reginfo.rc(rci);
                    self.pressure.free(rc);
                }
            }
        }
    }

    // Free all dead registers in `regs` from the pressure set.
    // As in `free_regs`, pending spills are skipped because `spill_reg` already freed them.
    fn free_dead_regs(&mut self, regs: &[LiveValue]) {
        for lv in regs {
            if lv.is_dead {
                if let Affinity::Reg(rci) = lv.affinity {
                    if !self.spills.contains(&lv.value) {
                        let rc = self.reginfo.rc(rci);
                        self.pressure.free(rc);
                    }
                }
            }
        }
    }

    /// Establish register pressure at the top of `ebb`, spilling live-ins or EBB parameters as
    /// needed so that everything live at the header fits in registers.
    fn visit_ebb_header(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
        let (liveins, params) = tracker.ebb_top(
            ebb,
            &self.cur.func.dfg,
            self.liveness,
            &self.cur.func.layout,
            self.domtree,
        );

        // Count the live-in registers. These should already fit in registers; they did at the
        // dominator.
        self.pressure.reset();
        self.take_live_regs(liveins);

        // An EBB can have an arbitrary (up to 2^16...) number of parameters, so they are not
        // guaranteed to fit in registers.
        for lv in params {
            if let Affinity::Reg(rci) = lv.affinity {
                let rc = self.reginfo.rc(rci);
                'try_take: while let Err(mask) = self.pressure.take_transient(rc) {
                    debug!("Need {} reg for EBB param {}", rc, lv.value);
                    match self.spill_candidate(mask, liveins) {
                        Some(cand) => {
                            debug!(
                                "Spilling live-in {} to make room for {} EBB param {}",
                                cand, rc, lv.value
                            );
                            self.spill_reg(cand);
                        }
                        None => {
                            // We can't spill any of the live-in registers, so we have to spill an
                            // EBB argument. Since the current spill metric would consider all the
                            // EBB arguments equal, just spill the present register.
                            debug!("Spilling {} EBB argument {}", rc, lv.value);

                            // Since `spill_reg` will free a register, add the current one here.
                            self.pressure.take(rc);
                            self.spill_reg(lv.value);
                            break 'try_take;
                        }
                    }
                }
            }
        }

        // The transient pressure counts for the EBB arguments are accurate. Just preserve them.
        self.pressure.preserve_transient();
        self.free_dead_regs(params);
    }

    /// Process a single non-ghost instruction: resolve noteworthy register uses, release killed
    /// registers, spill across calls, and make room for the instruction's register defs.
    fn visit_inst(&mut self, inst: Inst, ebb: Ebb, tracker: &mut LiveValueTracker) {
        debug!("Inst {}, {}", self.cur.display_inst(inst), self.pressure);
        debug_assert_eq!(self.cur.current_inst(), Some(inst));
        debug_assert_eq!(self.cur.current_ebb(), Some(ebb));

        let constraints = self
            .encinfo
            .operand_constraints(self.cur.func.encodings[inst]);

        // We may need to resolve register constraints if there are any noteworthy uses.
        debug_assert!(self.reg_uses.is_empty());
        self.collect_reg_uses(inst, ebb, constraints);

        // Calls usually have fixed register uses.
        let call_sig = self.cur.func.dfg.call_signature(inst);
        if let Some(sig) = call_sig {
            self.collect_abi_reg_uses(inst, sig);
        }

        if !self.reg_uses.is_empty() {
            self.process_reg_uses(inst, tracker);
        }

        // Update the live value tracker with this instruction.
        let (throughs, kills, defs) = tracker.process_inst(inst, &self.cur.func.dfg, self.liveness);

        // Remove kills from the pressure tracker.
        self.free_regs(kills);

        // If inst is a call, spill all register values that are live across the call.
        // This means that we don't currently take advantage of callee-saved registers.
        // TODO: Be more sophisticated.
        if call_sig.is_some() {
            for lv in throughs {
                if lv.affinity.is_reg() && !self.spills.contains(&lv.value) {
                    self.spill_reg(lv.value);
                }
            }
        }

        // Make sure we have enough registers for the register defs.
        // Dead defs are included here. They need a register too.
        // No need to process call return values, they are in fixed registers.
        if let Some(constraints) = constraints {
            for op in constraints.outs {
                if op.kind != ConstraintKind::Stack {
                    // Add register def to pressure, spill if needed.
                    while let Err(mask) = self.pressure.take_transient(op.regclass) {
                        debug!("Need {} reg from {} throughs", op.regclass, throughs.len());
                        match self.spill_candidate(mask, throughs) {
                            Some(cand) => self.spill_reg(cand),
                            None => panic!(
                                "Ran out of {} registers for {}",
                                op.regclass,
                                self.cur.display_inst(inst)
                            ),
                        }
                    }
                }
            }
            self.pressure.reset_transient();
        }

        // Restore pressure state, compute pressure with affinities from `defs`.
        // Exclude dead defs. Includes call return values.
        // This won't cause spilling.
        self.take_live_regs(defs);
    }

    // Collect register uses that are noteworthy in one of the following ways:
    //
    // 1. It's a fixed register constraint.
    // 2. It's a use of a spilled value.
    // 3. It's a tied register constraint and the value isn't killed.
    //
    // We are assuming here that if a value is used both by a fixed register operand and a register
    // class operand, the two are compatible. We are also assuming that two register class
    // operands are always compatible.
    fn collect_reg_uses(&mut self, inst: Inst, ebb: Ebb, constraints: Option<&RecipeConstraints>) {
        let args = self.cur.func.dfg.inst_args(inst);
        let num_fixed_ins = if let Some(constraints) = constraints {
            for (idx, (op, &arg)) in constraints.ins.iter().zip(args).enumerate() {
                let mut reguse = RegUse::new(arg, idx, op.regclass.into());
                let lr = &self.liveness[arg];
                match op.kind {
                    // Stack operands don't occupy a register; nothing to record.
                    ConstraintKind::Stack => continue,
                    ConstraintKind::FixedReg(_) => reguse.fixed = true,
                    ConstraintKind::Tied(_) => {
                        // A tied operand must kill the used value.
                        reguse.tied = !lr.killed_at(inst, ebb, &self.cur.func.layout);
                    }
                    ConstraintKind::FixedTied(_) => {
                        reguse.fixed = true;
                        reguse.tied = !lr.killed_at(inst, ebb, &self.cur.func.layout);
                    }
                    ConstraintKind::Reg => {}
                }
                if lr.affinity.is_stack() {
                    reguse.spilled = true;
                }

                // Only collect the interesting register uses.
                if reguse.fixed || reguse.tied || reguse.spilled {
                    debug!("  reguse: {}", reguse);
                    self.reg_uses.push(reguse);
                }
            }
            constraints.ins.len()
        } else {
            // A non-ghost instruction with no constraints can't have any
            // fixed operands.
            0
        };

        // Similarly, for return instructions, collect uses of ABI-defined
        // return values.
        if self.cur.func.dfg[inst].opcode().is_return() {
            debug_assert_eq!(
                self.cur.func.dfg.inst_variable_args(inst).len(),
                self.cur.func.signature.returns.len(),
                "The non-fixed arguments in a return should follow the function's signature."
            );
            for (ret_idx, (ret, &arg)) in
                self.cur.func.signature.returns.iter().zip(args).enumerate()
            {
                // Operand indices continue after the fixed ins counted above.
                let idx = num_fixed_ins + ret_idx;
                let unit = match ret.location {
                    ArgumentLoc::Unassigned => {
                        panic!("function return signature should be legalized")
                    }
                    ArgumentLoc::Reg(unit) => unit,
                    ArgumentLoc::Stack(_) => continue,
                };
                let toprc = toprc_containing_regunit(unit, &self.reginfo);
                let mut reguse = RegUse::new(arg, idx, toprc.into());
                reguse.fixed = true;

                debug!("  reguse: {}", reguse);
                self.reg_uses.push(reguse);
            }
        }
    }

    // Collect register uses from the ABI input constraints.
    // All register-assigned ABI arguments count as fixed uses; arguments whose live range lives
    // on the stack are additionally marked spilled.
    fn collect_abi_reg_uses(&mut self, inst: Inst, sig: SigRef) {
        let num_fixed_args = self.cur.func.dfg[inst]
            .opcode()
            .constraints()
            .num_fixed_value_arguments();
        let args = self.cur.func.dfg.inst_variable_args(inst);
        for (idx, (abi, &arg)) in self.cur.func.dfg.signatures[sig]
            .params
            .iter()
            .zip(args)
            .enumerate()
        {
            if abi.location.is_reg() {
                let (rci, spilled) = match self.liveness[arg].affinity {
                    Affinity::Reg(rci) => (rci, false),
                    // A spilled argument still needs a register at the call; pick the class
                    // the ABI type would use.
                    Affinity::Stack => (
                        self.cur.isa.regclass_for_abi_type(abi.value_type).into(),
                        true,
                    ),
                    Affinity::Unassigned => panic!("Missing affinity for {}", arg),
                };
                let mut reguse = RegUse::new(arg, num_fixed_args + idx, rci);
                reguse.fixed = true;
                reguse.spilled = spilled;
                self.reg_uses.push(reguse);
            }
        }
    }

    // Process multiple register uses to resolve potential conflicts.
    //
    // Look for multiple uses of the same value in `self.reg_uses` and insert copies as necessary.
    // Trigger spilling if any of the temporaries cause the register pressure to become too high.
    //
    // Leave `self.reg_uses` empty.
    fn process_reg_uses(&mut self, inst: Inst, tracker: &LiveValueTracker) {
        // We're looking for multiple uses of the same value, so start by sorting by value. The
        // secondary `opidx` key makes it possible to use an unstable (non-allocating) sort.
        self.reg_uses.sort_unstable_by_key(|u| (u.value, u.opidx));

        self.cur.use_srcloc(inst);
        for i in 0..self.reg_uses.len() {
            let ru = self.reg_uses[i];

            // Do we need to insert a copy for this use?
            let need_copy = if ru.tied {
                true
            } else if ru.fixed {
                // This is a fixed register use which doesn't necessarily require a copy.
                // Make a copy only if this is not the first use of the value.
                // At i == 0 `wrapping_sub` yields usize::MAX, so `get` returns None and the
                // use counts as a first use.
                self.reg_uses
                    .get(i.wrapping_sub(1))
                    .map_or(false, |ru2| ru2.value == ru.value)
            } else {
                false
            };

            if need_copy {
                let copy = self.insert_copy(ru.value, ru.rci);
                self.cur.func.dfg.inst_args_mut(inst)[ru.opidx as usize] = copy;
            }

            // Even if we don't insert a copy, we may need to account for register pressure for the
            // reload pass.
            if need_copy || ru.spilled {
                let rc = self.reginfo.rc(ru.rci);
                while let Err(mask) = self.pressure.take_transient(rc) {
                    debug!("Copy of {} reg causes spill", rc);
                    // Spill a live register that is *not* used by the current instruction.
                    // Spilling a use wouldn't help.
                    //
                    // Do allow spilling of EBB arguments on branches. This is safe since we spill
                    // the whole virtual register which includes the matching EBB parameter value
                    // at the branch destination. It is also necessary since there can be
                    // arbitrarily many EBB arguments.
                    match {
                        let args = if self.cur.func.dfg[inst].opcode().is_branch() {
                            self.cur.func.dfg.inst_fixed_args(inst)
                        } else {
                            self.cur.func.dfg.inst_args(inst)
                        };
                        self.spill_candidate(
                            mask,
                            tracker.live().iter().filter(|lv| !args.contains(&lv.value)),
                        )
                    } {
                        Some(cand) => self.spill_reg(cand),
                        None => panic!(
                            "Ran out of {} registers when inserting copy before {}",
                            rc,
                            self.cur.display_inst(inst)
                        ),
                    }
                }
            }
        }
        self.pressure.reset_transient();
        self.reg_uses.clear()
    }

    // Find a spill candidate from `candidates` whose top-level register class is in `mask`.
    fn spill_candidate<'ii, II>(&self, mask: RegClassMask, candidates: II) -> Option<Value>
    where
        II: IntoIterator<Item = &'ii LiveValue>,
    {
        // Find the best viable spill candidate.
        //
        // The very simple strategy implemented here is to spill the value with the earliest def in
        // the reverse post-order. This strategy depends on a good reload pass to generate good
        // code.
        //
        // We know that all candidate defs dominate the current instruction, so one of them will
        // dominate the others. That is the earliest def.
        candidates
            .into_iter()
            .filter_map(|lv| {
                // Viable candidates are registers in one of the `mask` classes, and not already in
                // the spill set.
                if let Affinity::Reg(rci) = lv.affinity {
                    let rc = self.reginfo.rc(rci);
                    if (mask & (1 << rc.toprc)) != 0 && !self.spills.contains(&lv.value) {
                        // Here, `lv` is a viable spill candidate.
                        return Some(lv.value);
                    }
                }
                None
            })
            .min_by(|&a, &b| {
                // Find the minimum candidate according to the RPO of their defs.
                self.domtree.rpo_cmp(
                    self.cur.func.dfg.value_def(a),
                    self.cur.func.dfg.value_def(b),
                    &self.cur.func.layout,
                )
            })
    }

    /// Spill `value` immediately by
    ///
    /// 1. Changing its affinity to `Stack` which marks the spill.
    /// 2. Removing the value from the pressure tracker.
    /// 3. Adding the value to `self.spills` for later reference by `process_spills`.
    ///
    /// Note that this does not update the cached affinity in the live value tracker. Call
    /// `process_spills` to do that.
    fn spill_reg(&mut self, value: Value) {
        if let Affinity::Reg(rci) = self.liveness.spill(value) {
            let rc = self.reginfo.rc(rci);
            self.pressure.free(rc);
            self.spills.push(value);
            debug!("Spilled {}:{} -> {}", value, rc, self.pressure);
        } else {
            panic!("Cannot spill {} that was already on the stack", value);
        }

        // Assign a spill slot for the whole virtual register.
        // Every value in the congruence class shares the slot so EBB arguments stay consistent
        // with their matching parameters.
        let ss = self
            .cur
            .func
            .stack_slots
            .make_spill_slot(self.cur.func.dfg.value_type(value));
        for &v in self.virtregs.congruence_class(&value) {
            self.liveness.spill(v);
            self.cur.func.locations[v] = ValueLoc::Stack(ss);
        }
    }

    /// Process any pending spills in the `self.spills` vector.
    ///
    /// It is assumed that spills are removed from the pressure tracker immediately, see
    /// `spill_reg` above.
    ///
    /// We also need to update the live range affinity and remove spilled values from the live
    /// value tracker.
    fn process_spills(&mut self, tracker: &mut LiveValueTracker) {
        if !self.spills.is_empty() {
            tracker.process_spills(|v| self.spills.contains(&v));
            self.spills.clear()
        }
    }

    /// Insert a `copy value` before the current instruction and give it a live range extending to
    /// the current instruction.
    ///
    /// Returns the new local value created.
    fn insert_copy(&mut self, value: Value, rci: RegClassIndex) -> Value {
        let copy = self.cur.ins().copy(value);
        let inst = self.cur.built_inst();

        // Update live ranges.
        self.liveness.create_dead(copy, inst, Affinity::Reg(rci));
        self.liveness.extend_locally(
            copy,
            self.cur.func.layout.pp_ebb(inst),
            self.cur.current_inst().expect("must be at an instruction"),
            &self.cur.func.layout,
        );

        copy
    }
}
583
/// Struct representing a register use of a value.
/// Used to detect multiple uses of the same value with incompatible register constraints.
#[derive(Clone, Copy)]
struct RegUse {
    // The value being used.
    value: Value,
    // Operand index of this use within the instruction's argument list.
    opidx: u16,

    // Register class required by the use.
    rci: RegClassIndex,

    // A use with a fixed register constraint.
    fixed: bool,

    // A register use of a spilled value.
    spilled: bool,

    // A use with a tied register constraint *and* the used value is not killed.
    tied: bool,
}
603
604impl RegUse {
605    fn new(value: Value, idx: usize, rci: RegClassIndex) -> Self {
606        Self {
607            value,
608            opidx: idx as u16,
609            rci,
610            fixed: false,
611            spilled: false,
612            tied: false,
613        }
614    }
615}
616
617impl fmt::Display for RegUse {
618    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
619        write!(f, "{}@op{}", self.value, self.opidx)?;
620        if self.fixed {
621            write!(f, "/fixed")?;
622        }
623        if self.spilled {
624            write!(f, "/spilled")?;
625        }
626        if self.tied {
627            write!(f, "/tied")?;
628        }
629        Ok(())
630    }
631}