regalloc/linear_scan/
mod.rs

//! Implementation of the linear scan allocator algorithm.
2//!
3//! This tries to follow the implementation as suggested by:
4//!   Optimized Interval Splitting in a Linear Scan Register Allocator,
5//!     by Wimmer et al., 2005
6
7use log::{log_enabled, trace, Level};
8
9use std::env;
10use std::fmt;
11use std::{cmp::Ordering, default};
12
13use crate::{
14    checker::CheckerContext, reg_maps::MentionRegUsageMapper, Function, RealRegUniverse,
15    RegAllocError, RegAllocResult, RegClass, Set, SpillSlot, VirtualReg, NUM_REG_CLASSES,
16};
17use crate::{
18    checker::CheckerStackmapInfo,
19    inst_stream::{add_spills_reloads_and_moves, InstToInsertAndExtPoint},
20};
21use crate::{
22    data_structures::{BlockIx, InstIx, InstPoint, Point, RealReg, RegVecsAndBounds},
23    CheckerErrors, StackmapRequestInfo,
24};
25
26use analysis::{AnalysisInfo, RangeFrag};
27use smallvec::SmallVec;
28
29use self::analysis::{BlockBoundary, BlockPos};
30
31mod analysis;
32mod assign_registers;
33mod resolve_moves;
34
/// Counters accumulated during allocation and reported on drop (see the
/// `Drop` impl below). Only populated when stats collection is enabled via
/// `LinearScanOptions` (the `LSRA_STATS` environment variable).
#[derive(Default)]
pub(crate) struct Statistics {
    // When set, suppress the report for small functions (fewer than 1000
    // vregs; see the `Drop` impl).
    only_large: bool,

    // Input sizes: fixed intervals, virtual registers and virtual ranges
    // (filled in by `run`; `num_vregs` is derived from the highest vreg index).
    num_fixed: usize,
    num_vregs: usize,
    num_virtual_ranges: usize,

    // Peak sizes of the active/inactive sets — presumably maintained by
    // `assign_registers` (not visible in this file).
    peak_active: usize,
    peak_inactive: usize,

    // Direct register allocation attempts, and how many succeeded.
    num_try_allocate_reg: usize,
    num_try_allocate_reg_success: usize,

    // Partial split attempts, and how many succeeded.
    num_reg_splits: usize,
    num_reg_splits_success: usize,
}
52
53impl Drop for Statistics {
54    fn drop(&mut self) {
55        if self.only_large && self.num_vregs < 1000 {
56            return;
57        }
58        println!(
59            "stats: {} fixed; {} vreg; {} vranges; {} peak-active; {} peak-inactive, {} direct-alloc; {} total-alloc; {} partial-splits; {} partial-splits-attempts",
60            self.num_fixed,
61            self.num_vregs,
62            self.num_virtual_ranges,
63            self.peak_active,
64            self.peak_inactive,
65            self.num_try_allocate_reg_success,
66            self.num_try_allocate_reg,
67            self.num_reg_splits_success,
68            self.num_reg_splits,
69        );
70    }
71}
72
/// Which strategy should we use when trying to find the best split position?
/// TODO Consider loop depth to avoid splitting in the middle of a loop
/// whenever possible.
///
/// Selected at runtime through the `LSRA_SPLIT` environment variable; see
/// `LinearScanOptions::default` for the string-to-variant mapping. `From` is
/// the default.
#[derive(Copy, Clone, Debug)]
enum OptimalSplitStrategy {
    From,
    To,
    NextFrom,
    NextNextFrom,
    PrevTo,
    PrevPrevTo,
    Mid,
}
86
/// Tunable options for the linear scan allocator. Normally configured through
/// environment variables; see the `Default` impl for the variable names.
#[derive(Clone)]
pub struct LinearScanOptions {
    // Where to split intervals when a split is required.
    split_strategy: OptimalSplitStrategy,
    // Partial-splitting knobs (`LSRA_PARTIAL` / `LSRA_PARTIAL_END`).
    partial_split: bool,
    partial_split_near_end: bool,
    // Statistics collection (`LSRA_STATS`); `large_stats` additionally
    // restricts the report to large functions (`LSRA_LARGE_STATS`).
    stats: bool,
    large_stats: bool,
}
95
96impl default::Default for LinearScanOptions {
97    fn default() -> Self {
98        // Useful for debugging.
99        let optimal_split_strategy = match env::var("LSRA_SPLIT") {
100            Ok(s) => match s.as_str() {
101                "t" | "to" => OptimalSplitStrategy::To,
102                "n" => OptimalSplitStrategy::NextFrom,
103                "nn" => OptimalSplitStrategy::NextNextFrom,
104                "p" => OptimalSplitStrategy::PrevTo,
105                "pp" => OptimalSplitStrategy::PrevPrevTo,
106                "m" | "mid" => OptimalSplitStrategy::Mid,
107                _ => OptimalSplitStrategy::From,
108            },
109            Err(_) => OptimalSplitStrategy::From,
110        };
111
112        let large_stats = env::var("LSRA_LARGE_STATS").is_ok();
113        let stats = env::var("LSRA_STATS").is_ok() || large_stats;
114
115        let partial_split = env::var("LSRA_PARTIAL").is_ok();
116        let partial_split_near_end = env::var("LSRA_PARTIAL_END").is_ok();
117
118        Self {
119            split_strategy: optimal_split_strategy,
120            partial_split,
121            partial_split_near_end,
122            stats,
123            large_stats,
124        }
125    }
126}
127
impl fmt::Debug for LinearScanOptions {
    // Two-line debug rendering: allocator name, then the split strategy.
    // The remaining options are intentionally omitted.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        writeln!(fmt, "linear scan")?;
        write!(fmt, "  split: {:?}", self.split_strategy)
    }
}
134
// Local shorthands.
/// Shorthand for the register-use data produced by the analysis pass.
type RegUses = RegVecsAndBounds;
137
/// A unique identifier for an interval.
#[derive(Clone, Copy, PartialEq, Eq)]
struct IntId(pub(crate) usize);

impl fmt::Debug for IntId {
    /// Interval ids display as `int<index>`, e.g. `int42`.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let IntId(index) = *self;
        write!(fmt, "int{}", index)
    }
}
147
/// A fixed interval: a real register together with the list of range
/// fragments in which it is live.
#[derive(Clone)]
struct FixedInterval {
    reg: RealReg,
    // `find_frag` binary-searches this list, so the fragments must be sorted
    // by position and non-overlapping.
    frags: Vec<RangeFrag>,
}
153
154impl fmt::Display for FixedInterval {
155    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
156        write!(f, "fixed {:?} [", self.reg)?;
157        for (i, frag) in self.frags.iter().enumerate() {
158            if i > 0 {
159                write!(f, ", ")?;
160            }
161            if frag.ref_typed {
162                write!(f, "ref ")?;
163            }
164            write!(f, "({:?}, {:?})", frag.first, frag.last)?;
165        }
166        write!(f, "]")
167    }
168}
169
impl FixedInterval {
    /// Find the fragment that contains the given instruction point.
    /// May crash if the point doesn't belong to any fragment.
    ///
    /// Assumes `self.frags` is sorted by position and non-overlapping, which
    /// is what makes a binary search on the containment predicate valid.
    pub(crate) fn find_frag(&self, pt: InstPoint) -> usize {
        self.frags
            .binary_search_by(|frag| {
                // A fragment entirely after `pt` compares Greater, one entirely
                // before compares Less, and the (unique) fragment containing
                // `pt` compares Equal.
                if pt < frag.first {
                    Ordering::Greater
                } else if pt >= frag.first && pt <= frag.last {
                    Ordering::Equal
                } else {
                    Ordering::Less
                }
            })
            // Panics if no fragment contains `pt`, per the contract above.
            .unwrap()
    }
}
187
/// The safepoints crossed by an interval: pairs of (safepoint instruction
/// index, index into the stackmap request's safepoint list) — the latter is
/// used to index per-safepoint stackmaps in `compute_stackmaps`.
type Safepoints = SmallVec<[(InstIx, usize); 8]>;

/// A live interval for a virtual register, possibly the result of splitting a
/// larger interval.
#[derive(Clone)]
pub(crate) struct VirtualInterval {
    /// Unique id of this interval; also its index in `Intervals::virtuals`.
    id: IntId,
    /// The virtual register this interval carries.
    vreg: VirtualReg,

    /// Is this interval used for a reference type?
    ref_typed: bool,

    /// Parent interval in the split tree.
    parent: Option<IntId>,
    /// Ancestor interval in the split tree, if any.
    ancestor: Option<IntId>,
    /// Child interval, if it has one, in the split tree.
    child: Option<IntId>,

    /// Location assigned to this live interval.
    location: Location,

    // Per-instruction use/mod/def information for `vreg`; binary-searched by
    // instruction index in `next_use`/`last_use`, so expected sorted.
    mentions: MentionMap,
    // Block start/end boundaries covered by this interval.
    block_boundaries: Vec<BlockBoundary>,
    safepoints: Safepoints,
    // Inclusive start and end points of the interval.
    start: InstPoint,
    end: InstPoint,
}
213
impl fmt::Display for VirtualInterval {
    // Renders as:
    //   virtual int<N> [ref] [(parent=int<P>)]: <vreg> <loc> [start; end]
    //   boundaries=[...] [safepoints=[...]]
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "virtual {:?}", self.id)?;
        if self.ref_typed {
            write!(fmt, " ref")?;
        }
        if let Some(ref p) = self.parent {
            write!(fmt, " (parent={:?})", p)?;
        }
        write!(
            fmt,
            ": {:?} {} [{:?}; {:?}]",
            self.vreg, self.location, self.start, self.end
        )?;
        // Each boundary shows its block index plus "s" (start) or "e" (end).
        write!(
            fmt,
            " boundaries=[{}]",
            self.block_boundaries
                .iter()
                .map(|boundary| format!(
                    "{:?}{}",
                    boundary.bix,
                    if boundary.pos == BlockPos::Start {
                        "s"
                    } else {
                        "e"
                    }
                ))
                .collect::<Vec<_>>()
                .join(", ")
        )?;
        // Safepoints are only printed when present; entries are separated by ", ".
        if !self.safepoints.is_empty() {
            write!(fmt, " safepoints=[")?;
            for (i, sp) in self.safepoints.iter().enumerate() {
                if i > 0 {
                    write!(fmt, ", {:?}", sp.0)?;
                } else {
                    write!(fmt, "{:?}", sp.0)?;
                }
            }
            write!(fmt, "]")?;
        }
        Ok(())
    }
}
259
impl VirtualInterval {
    /// Creates a fresh interval with no assigned location and no links in the
    /// split tree yet.
    fn new(
        id: IntId,
        vreg: VirtualReg,
        start: InstPoint,
        end: InstPoint,
        mentions: MentionMap,
        block_boundaries: Vec<BlockBoundary>,
        ref_typed: bool,
        safepoints: Safepoints,
    ) -> Self {
        Self {
            id,
            vreg,
            parent: None,
            ancestor: None,
            child: None,
            location: Location::None,
            mentions,
            block_boundaries,
            safepoints,
            start,
            end,
            ref_typed,
        }
    }
    // Accessors for the safepoints crossed by this interval.
    fn safepoints(&self) -> &Safepoints {
        &self.safepoints
    }
    fn safepoints_mut(&mut self) -> &mut Safepoints {
        &mut self.safepoints
    }
    // Accessors for the per-instruction mentions of this interval's vreg.
    fn mentions(&self) -> &MentionMap {
        &self.mentions
    }
    fn mentions_mut(&mut self) -> &mut MentionMap {
        &mut self.mentions
    }
    // Accessors for the block boundaries covered by this interval.
    fn block_boundaries(&self) -> &[BlockBoundary] {
        &self.block_boundaries
    }
    fn block_boundaries_mut(&mut self) -> &mut Vec<BlockBoundary> {
        &mut self.block_boundaries
    }
    /// Does this interval contain `pos`? Both endpoints are inclusive.
    fn covers(&self, pos: InstPoint) -> bool {
        self.start <= pos && pos <= self.end
    }
}
308
/// This data structure tracks the mentions of a register (virtual or real) at a precise
/// instruction point. It's a set encoded as three flags, one for each of use/mod/def.
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct Mention(u8);

impl fmt::Debug for Mention {
    /// Renders the set as a comma-separated subset of `use`, `mod` and `def`
    /// (in that order); an empty set renders as the empty string.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut parts: Vec<&str> = Vec::with_capacity(3);
        if self.is_use() {
            parts.push("use");
        }
        if self.is_mod() {
            parts.push("mod");
        }
        if self.is_def() {
            parts.push("def");
        }
        fmt.write_str(&parts.join(","))
    }
}

impl Mention {
    /// Creates an empty mention set.
    fn new() -> Self {
        Self(0)
    }

    // Setters: record that the register is read / modified / written at the
    // instruction point this `Mention` describes.
    fn add_use(&mut self) {
        self.0 |= 0b001;
    }
    fn add_mod(&mut self) {
        self.0 |= 0b010;
    }
    fn add_def(&mut self) {
        self.0 |= 0b100;
    }

    // Getters: test individual flags or useful combinations thereof.
    fn is_use(&self) -> bool {
        self.0 & 0b001 != 0
    }
    fn is_mod(&self) -> bool {
        self.0 & 0b010 != 0
    }
    fn is_def(&self) -> bool {
        self.0 & 0b100 != 0
    }
    fn is_use_or_mod(&self) -> bool {
        self.0 & 0b011 != 0
    }
    fn is_mod_or_def(&self) -> bool {
        self.0 & 0b110 != 0
    }
}
371
/// All the mentions of a single register, keyed by instruction index; expected
/// to be sorted by it (it is binary-searched in `next_use`/`last_use`).
pub type MentionMap = SmallVec<[(InstIx, Mention); 2]>;

/// Where an interval's value lives: nowhere yet, in a real register, or in a
/// stack spill slot.
#[derive(Debug, Clone, Copy)]
pub(crate) enum Location {
    None,
    Reg(RealReg),
    Stack(SpillSlot),
}
380
381impl Location {
382    pub(crate) fn reg(&self) -> Option<RealReg> {
383        match self {
384            Location::Reg(reg) => Some(*reg),
385            _ => None,
386        }
387    }
388    pub(crate) fn spill(&self) -> Option<SpillSlot> {
389        match self {
390            Location::Stack(slot) => Some(*slot),
391            _ => None,
392        }
393    }
394    pub(crate) fn is_none(&self) -> bool {
395        match self {
396            Location::None => true,
397            _ => false,
398        }
399    }
400}
401
402impl fmt::Display for Location {
403    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
404        match self {
405            Location::None => write!(fmt, "none"),
406            Location::Reg(reg) => write!(fmt, "{:?}", reg),
407            Location::Stack(slot) => write!(fmt, "{:?}", slot),
408        }
409    }
410}
411
/// A group of live intervals.
pub struct Intervals {
    // Virtual intervals, indexed by their `IntId` (see `push_interval`).
    virtuals: Vec<VirtualInterval>,
    // Fixed intervals for real registers.
    fixeds: Vec<FixedInterval>,
}
417
418impl Intervals {
419    fn get(&self, int_id: IntId) -> &VirtualInterval {
420        &self.virtuals[int_id.0]
421    }
422    fn get_mut(&mut self, int_id: IntId) -> &mut VirtualInterval {
423        &mut self.virtuals[int_id.0]
424    }
425    fn num_virtual_intervals(&self) -> usize {
426        self.virtuals.len()
427    }
428
429    // Mutators.
430    fn set_reg(&mut self, int_id: IntId, reg: RealReg) {
431        let int = self.get_mut(int_id);
432        debug_assert!(int.location.is_none());
433        int.location = Location::Reg(reg);
434    }
435    fn set_spill(&mut self, int_id: IntId, slot: SpillSlot) {
436        let int = self.get_mut(int_id);
437        debug_assert!(int.location.spill().is_none());
438        int.location = Location::Stack(slot);
439    }
440    fn push_interval(&mut self, int: VirtualInterval) {
441        debug_assert!(int.id.0 == self.virtuals.len());
442        self.virtuals.push(int);
443    }
444    fn set_child(&mut self, int_id: IntId, child_id: IntId) {
445        if let Some(prev_child) = self.virtuals[int_id.0].child.clone() {
446            self.virtuals[child_id.0].child = Some(prev_child);
447            self.virtuals[prev_child.0].parent = Some(child_id);
448        }
449        self.virtuals[int_id.0].child = Some(child_id);
450    }
451}
452
/// Finds the first use for the current interval that's located after the given
/// `pos` (included), in a broad sense of use (any of use, def or mod).
///
/// Extends to the left, that is, "modified" means "used".
#[inline(never)]
fn next_use(interval: &VirtualInterval, pos: InstPoint, _reg_uses: &RegUses) -> Option<InstPoint> {
    if log_enabled!(Level::Trace) {
        trace!("find next use of {} after {:?}", interval, pos);
    }

    let mentions = interval.mentions();
    // Never search before the interval's own start.
    let target = InstPoint::max(pos, interval.start);

    // The mentions are binary-searched by instruction index (assumed sorted).
    let ret = match mentions.binary_search_by_key(&target.iix(), |mention| mention.0) {
        Ok(index) => {
            // Either the selected index is a perfect match, or the next mention is
            // the correct answer.
            let mention = &mentions[index];
            if target.pt() == Point::Use {
                if mention.1.is_use_or_mod() {
                    Some(InstPoint::new_use(mention.0))
                } else {
                    // Only a def at this instruction: the def point is the answer.
                    Some(InstPoint::new_def(mention.0))
                }
            } else if target.pt() == Point::Def && mention.1.is_mod_or_def() {
                Some(target)
            } else if index == mentions.len() - 1 {
                // No later mention to fall back on.
                None
            } else {
                let mention = &mentions[index + 1];
                if mention.1.is_use_or_mod() {
                    Some(InstPoint::new_use(mention.0))
                } else {
                    Some(InstPoint::new_def(mention.0))
                }
            }
        }

        Err(index) => {
            // `index` is the insertion point, i.e. the first mention at an
            // instruction strictly after `target`'s, if any.
            if index == mentions.len() {
                None
            } else {
                let mention = &mentions[index];
                if mention.1.is_use_or_mod() {
                    Some(InstPoint::new_use(mention.0))
                } else {
                    Some(InstPoint::new_def(mention.0))
                }
            }
        }
    };

    // Discard any result beyond the interval's end.
    // TODO once the mentions are properly split, this could be removed, in
    // theory.
    let ret = match ret {
        Some(pos) => {
            if pos <= interval.end {
                Some(pos)
            } else {
                None
            }
        }
        None => None,
    };

    ret
}
520
/// Finds the last use of a vreg before a given target, including it in possible
/// return values.
/// Extends to the right, that is, modified means "def".
#[inline(never)]
fn last_use(interval: &VirtualInterval, pos: InstPoint, _reg_uses: &RegUses) -> Option<InstPoint> {
    if log_enabled!(Level::Trace) {
        trace!("searching last use of {} before {:?}", interval, pos,);
    }

    let mentions = interval.mentions();

    // Never search past the interval's own end.
    let target = InstPoint::min(pos, interval.end);

    // The mentions are binary-searched by instruction index (assumed sorted).
    let ret = match mentions.binary_search_by_key(&target.iix(), |mention| mention.0) {
        Ok(index) => {
            // Either the selected index is a perfect match, or the previous mention
            // is the correct answer.
            let mention = &mentions[index];
            if target.pt() == Point::Def {
                if mention.1.is_mod_or_def() {
                    Some(InstPoint::new_def(mention.0))
                } else {
                    // Only a use at this instruction: the use point is the answer.
                    Some(InstPoint::new_use(mention.0))
                }
            } else if target.pt() == Point::Use && mention.1.is_use() {
                Some(target)
            } else if index == 0 {
                // No earlier mention to fall back on.
                None
            } else {
                let mention = &mentions[index - 1];
                if mention.1.is_mod_or_def() {
                    Some(InstPoint::new_def(mention.0))
                } else {
                    Some(InstPoint::new_use(mention.0))
                }
            }
        }

        Err(index) => {
            // `index` is the insertion point; the previous mention (if any) is
            // the last one at an instruction strictly before `target`'s.
            if index == 0 {
                None
            } else {
                let mention = &mentions[index - 1];
                if mention.1.is_mod_or_def() {
                    Some(InstPoint::new_def(mention.0))
                } else {
                    Some(InstPoint::new_use(mention.0))
                }
            }
        }
    };

    // Discard any result before the interval's start.
    // TODO once the mentions are properly split, this could be removed, in
    // theory.
    let ret = match ret {
        Some(pos) => {
            if pos >= interval.start {
                Some(pos)
            } else {
                None
            }
        }
        None => None,
    };

    trace!("mentions: {:?}", mentions);
    trace!("found: {:?}", ret);

    ret
}
591
592/// Checks that each register class has its own scratch register in addition to one available
593/// register, and creates a mapping of register class -> scratch register.
594fn compute_scratches(
595    reg_universe: &RealRegUniverse,
596) -> Result<Vec<Option<RealReg>>, RegAllocError> {
597    let mut scratches_by_rc = vec![None; NUM_REG_CLASSES];
598    for i in 0..NUM_REG_CLASSES {
599        if let Some(info) = &reg_universe.allocable_by_class[i] {
600            if info.first == info.last {
601                return Err(RegAllocError::Other(
602                    "at least 2 registers required for linear scan".into(),
603                ));
604            }
605            let scratch = if let Some(suggested_reg) = info.suggested_scratch {
606                reg_universe.regs[suggested_reg].0
607            } else {
608                return Err(RegAllocError::MissingSuggestedScratchReg(
609                    RegClass::rc_from_u32(i as u32),
610                ));
611            };
612            scratches_by_rc[i] = Some(scratch);
613        }
614    }
615    Ok(scratches_by_rc)
616}
617
/// Allocator top level.
///
/// `func` is modified so that, when this function returns, it will contain no VirtualReg uses.
///
/// Allocation can fail if there are insufficient registers to even generate spill/reload code, or
/// if the function appears to have any undefined VirtualReg/RealReg uses.
#[inline(never)]
pub(crate) fn run<F: Function>(
    func: &mut F,
    reg_universe: &RealRegUniverse,
    stackmap_request: Option<&StackmapRequestInfo>,
    use_checker: bool,
    opts: &LinearScanOptions,
) -> Result<RegAllocResult<F>, RegAllocError> {
    // Phase 1: liveness analysis and live interval construction.
    let AnalysisInfo {
        reg_vecs_and_bounds: reg_uses,
        intervals,
        liveins,
        liveouts,
        cfg,
        ..
    } = analysis::run(func, reg_universe, stackmap_request)
        .map_err(|err| RegAllocError::Analysis(err))?;

    let scratches_by_rc = compute_scratches(reg_universe)?;

    // Optionally collect statistics; they're printed when the `Statistics`
    // value is eventually dropped (see its `Drop` impl).
    let stats = if opts.stats {
        let mut stats = Statistics::default();
        stats.num_fixed = intervals.fixeds.len();
        stats.num_virtual_ranges = intervals.virtuals.len();
        // The highest vreg index is used as a proxy for the vreg count.
        stats.num_vregs = intervals
            .virtuals
            .iter()
            .map(|virt| virt.vreg.get_index())
            .fold(0, |a, b| usize::max(a, b));
        stats.only_large = opts.large_stats;
        Some(stats)
    } else {
        None
    };

    if log_enabled!(Level::Trace) {
        trace!("fixed intervals:");
        for int in &intervals.fixeds {
            trace!("{}", int);
        }
        trace!("");
        trace!("unassigned intervals:");
        for int in &intervals.virtuals {
            trace!("{}", int);
            for mention in &int.mentions {
                trace!("  mention @ {:?}: {:?}", mention.0, mention.1);
            }
        }
        trace!("");
    }

    // Phase 2: walk the intervals and assign each a register or a spill slot.
    let (intervals, mut num_spill_slots) = assign_registers::run(
        opts,
        func,
        &reg_uses,
        reg_universe,
        &scratches_by_rc,
        intervals,
        stats,
    )?;

    let virtuals = &intervals.virtuals;

    // Phase 3: compute the spill/reload/move instructions implied by splits.
    let memory_moves = resolve_moves::run(
        func,
        &cfg,
        &reg_uses,
        virtuals,
        &liveins,
        &liveouts,
        &mut num_spill_slots,
        &scratches_by_rc,
    );

    // Phase 4: rewrite the function with the computed assignments.
    apply_registers(
        func,
        virtuals,
        memory_moves,
        reg_universe,
        num_spill_slots,
        use_checker,
        stackmap_request,
    )
}
708
/// Rewrites every instruction in `func`, mapping each mentioned virtual
/// register to the real register its interval was assigned, and collects the
/// set of clobbered real registers. When `use_checker` is set, each
/// instruction is also fed to the regalloc checker (on the virtual-register
/// form, before mapping) and checker errors are returned.
#[inline(never)]
fn set_registers<F: Function>(
    func: &mut F,
    virtual_intervals: &Vec<VirtualInterval>,
    reg_universe: &RealRegUniverse,
    use_checker: bool,
    memory_moves: &Vec<InstToInsertAndExtPoint>,
    stackmap_request: Option<&StackmapRequestInfo>,
    stackmaps: &[Vec<SpillSlot>],
) -> Result<Set<RealReg>, CheckerErrors> {
    trace!("set_registers");

    let mut clobbered_registers = Set::empty();

    // Collect all the regs per instruction and mention set.
    let capacity = virtual_intervals
        .iter()
        .map(|int| int.mentions.len())
        .fold(0, |a, b| a + b);

    if capacity == 0 {
        // No virtual registers have been allocated, exit early.
        return Ok(clobbered_registers);
    }

    // Flatten the intervals into (inst index, mention, vreg, rreg) quads,
    // skipping intervals that ended up spilled (no register assigned).
    let mut mention_map = Vec::with_capacity(capacity);

    for int in virtual_intervals {
        let rreg = match int.location.reg() {
            Some(rreg) => rreg,
            _ => continue,
        };
        trace!("int: {}", int);
        trace!("  {:?}", int.mentions);
        for &mention in &int.mentions {
            mention_map.push((mention.0, mention.1, int.vreg, rreg));
        }
    }

    // Sort by instruction index.
    mention_map.sort_unstable_by_key(|quad| quad.0);

    // Iterate over all the mentions.
    let mut mapper = MentionRegUsageMapper::new();

    // Set up checker state, if indicated by our configuration.
    let mut checker: Option<CheckerContext> = None;
    let mut insn_blocks: Vec<BlockIx> = vec![];
    if use_checker {
        let stackmap_info =
            stackmap_request.map(|request| CheckerStackmapInfo { request, stackmaps });
        checker = Some(CheckerContext::new(
            func,
            reg_universe,
            memory_moves,
            stackmap_info,
        ));
        // Build an instruction-index -> block-index table for the checker.
        insn_blocks.resize(func.insns().len(), BlockIx::new(0));
        for block_ix in func.blocks() {
            for insn_ix in func.block_insns(block_ix) {
                insn_blocks[insn_ix.get() as usize] = block_ix;
            }
        }
    }

    let mut cur_quad_ix = 0;
    for func_inst_ix in func.insn_indices() {
        // Several items in the mention_map array may refer to the same instruction index, so
        // iterate over all of them that are related to the current instruction index.
        while let Some((iix, mention_set, vreg, rreg)) = mention_map.get(cur_quad_ix) {
            if func_inst_ix != *iix {
                break;
            }

            trace!(
                "{:?}: {:?} is in {:?} at {:?}",
                iix,
                vreg,
                rreg,
                mention_set
            );

            // Fill in new information at the given index.
            if mention_set.is_use() {
                if let Some(prev_rreg) = mapper.lookup_use(*vreg) {
                    debug_assert_eq!(prev_rreg, *rreg, "different use allocs for {:?}", vreg);
                }
                mapper.set_use(*vreg, *rreg);
            }

            let included_in_clobbers = func.is_included_in_clobbers(func.get_insn(*iix));
            if mention_set.is_mod() {
                // A mod is both a use and a def; record it as both in the mapper.
                if let Some(prev_rreg) = mapper.lookup_use(*vreg) {
                    debug_assert_eq!(prev_rreg, *rreg, "different use allocs for {:?}", vreg);
                }
                if let Some(prev_rreg) = mapper.lookup_def(*vreg) {
                    debug_assert_eq!(prev_rreg, *rreg, "different def allocs for {:?}", vreg);
                }

                mapper.set_use(*vreg, *rreg);
                mapper.set_def(*vreg, *rreg);
                if included_in_clobbers {
                    clobbered_registers.insert(*rreg);
                }
            }

            if mention_set.is_def() {
                if let Some(prev_rreg) = mapper.lookup_def(*vreg) {
                    debug_assert_eq!(prev_rreg, *rreg, "different def allocs for {:?}", *vreg);
                }

                mapper.set_def(*vreg, *rreg);
                if included_in_clobbers {
                    clobbered_registers.insert(*rreg);
                }
            }

            cur_quad_ix += 1;
        }

        // At this point we've correctly filled the mapper; actually map the virtual registers to
        // the real ones in the Function.
        trace!("map_regs for {:?}", func_inst_ix);

        // If available, make sure to update the checker's state *before* actually mapping the
        // register; the checker must see the function with virtual registers, not real ones.
        if let Some(ref mut checker) = checker {
            let block_ix = insn_blocks[func_inst_ix.get() as usize];
            checker
                .handle_insn(reg_universe, func, block_ix, func_inst_ix, &mapper)
                .unwrap();
        }

        let mut inst = func.get_insn_mut(func_inst_ix);
        F::map_regs(&mut inst, &mapper);

        // The mapper only describes a single instruction; reset it for the next one.
        mapper.clear();
    }

    if let Some(checker) = checker {
        checker.run()?;
    }

    Ok(clobbered_registers)
}
854
855fn compute_stackmaps(
856    intervals: &[VirtualInterval],
857    stackmap_request: Option<&StackmapRequestInfo>,
858) -> Vec<Vec<SpillSlot>> {
859    if let Some(request) = stackmap_request {
860        let mut stackmaps = vec![Vec::new(); request.safepoint_insns.len()];
861        for int in intervals {
862            if !int.ref_typed {
863                continue;
864            }
865            if let Some(slot) = int.location.spill() {
866                for &(_sp_iix, sp_ix) in &int.safepoints {
867                    stackmaps[sp_ix].push(slot);
868                }
869            }
870        }
871        stackmaps
872    } else {
873        vec![]
874    }
875}
876
877/// Fills in the register assignments into instructions.
878#[inline(never)]
879fn apply_registers<F: Function>(
880    func: &mut F,
881    virtual_intervals: &Vec<VirtualInterval>,
882    memory_moves: Vec<InstToInsertAndExtPoint>,
883    reg_universe: &RealRegUniverse,
884    num_spill_slots: u32,
885    use_checker: bool,
886    stackmap_request: Option<&StackmapRequestInfo>,
887) -> Result<RegAllocResult<F>, RegAllocError> {
888    trace!("apply_registers");
889
890    let stackmaps = compute_stackmaps(virtual_intervals, stackmap_request.clone());
891
892    let clobbered_registers = set_registers(
893        func,
894        virtual_intervals,
895        reg_universe,
896        use_checker,
897        &memory_moves,
898        stackmap_request,
899        &stackmaps,
900    )
901    .map_err(|err| RegAllocError::RegChecker(err))?;
902
903    let (final_insns, target_map, new_to_old_insn_map, new_safepoint_insns) =
904        add_spills_reloads_and_moves(
905            func,
906            stackmap_request.map(|request| request.safepoint_insns.as_slice()),
907            memory_moves,
908        )
909        .map_err(|e| RegAllocError::Other(e))?;
910
911    // And now remove from the clobbered registers set, all those not available to the allocator.
912    // But not removing the reserved regs, since we might have modified those.
913    clobbered_registers.filter_map(|&reg| {
914        if reg.get_index() >= reg_universe.allocable {
915            None
916        } else {
917            Some(reg)
918        }
919    });
920
921    Ok(RegAllocResult {
922        insns: final_insns,
923        target_map,
924        orig_insn_map: new_to_old_insn_map,
925        clobbered_registers,
926        num_spill_slots,
927        block_annotations: None,
928        stackmaps,
929        new_safepoint_insns,
930    })
931}