midenc_hir_transform/spill.rs

use alloc::{collections::VecDeque, rc::Rc};

use midenc_hir::{
    adt::{SmallDenseMap, SmallSet},
    cfg::Graph,
    dominance::{DomTreeNode, DominanceFrontier, DominanceInfo},
    pass::{AnalysisManager, PostPassStatus},
    traits::SingleRegion,
    BlockRef, Builder, Context, FxHashMap, OpBuilder, OpOperand, Operation, OperationRef,
    ProgramPoint, Region, RegionBranchOpInterface, RegionBranchPoint, RegionRef, Report, Rewriter,
    SmallVec, SourceSpan, Spanned, StorableEntity, Usable, ValueRange, ValueRef,
};
use midenc_hir_analysis::analyses::{
    spills::{Placement, Predecessor},
    SpillAnalysis,
};

/// This interface is used in conjunction with [transform_spills] so that the transform can be used
/// with any dialect, and more importantly, avoids forming a dependency on our own dialects for the
/// subset of operations we need to emit/rewrite.
pub trait TransformSpillsInterface {
    /// Create an unconditional branch to `destination`
    fn create_unconditional_branch(
        &self,
        builder: &mut OpBuilder,
        destination: BlockRef,
        arguments: &[ValueRef],
        span: SourceSpan,
    ) -> Result<(), Report>;

    /// Create a spill for `value`, returning the spill instruction
    fn create_spill(
        &self,
        builder: &mut OpBuilder,
        value: ValueRef,
        span: SourceSpan,
    ) -> Result<OperationRef, Report>;

    /// Create a reload of `value`, returning the reload instruction
    fn create_reload(
        &self,
        builder: &mut OpBuilder,
        value: ValueRef,
        span: SourceSpan,
    ) -> Result<OperationRef, Report>;

    /// Convert `spill`, a [SpillLike] operation, into a primitive memory store of the spilled
    /// value.
    fn convert_spill_to_store(
        &mut self,
        rewriter: &mut dyn Rewriter,
        spill: OperationRef,
    ) -> Result<(), Report>;

    /// Convert `reload`, a [ReloadLike] operation, into a primitive memory load of the spilled
    /// value.
    fn convert_reload_to_load(
        &mut self,
        rewriter: &mut dyn Rewriter,
        reload: OperationRef,
    ) -> Result<(), Report>;
}

/// An operation trait for operations that implement spill-like behavior for purposes of the
/// spills transformation/rewrite.
///
/// A spill-like operation is expected to take a single value, and store it somewhere in memory
/// temporarily, such that the live range of the original value is terminated by the spill. Spilled
/// values may then be reloaded, starting a new live range, using the corresponding [ReloadLike] op.
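///
/// Schematically (not actual dialect syntax; a hedged sketch of the intended effect), a
/// spill/reload pair splits a value's live range like so:
///
/// ```text
/// v0 = ...          ; original definition of v0
/// spill v0          ; SpillLike op: the live range of v0 ends here
/// ...               ; v0 need not be kept live through this region
/// v1 = reload v0    ; ReloadLike op: begins a new live range for the same logical value
/// use v1            ; uses formerly referring to v0 are rewritten to use v1
/// ```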
pub trait SpillLike {
    /// Returns the operand corresponding to the spilled value
    fn spilled(&self) -> OpOperand;
    /// Returns a reference to the spilled value
    fn spilled_value(&self) -> ValueRef {
        self.spilled().borrow().as_value_ref()
    }
}

/// An operation trait for operations that implement reload-like behavior for purposes of the
/// spills transformation/rewrite.
///
/// A reload-like operation is expected to take a single value, for which a dominating [SpillLike]
/// op exists, and produce a new, unique SSA value corresponding to the reloaded spill value. The
/// spills transformation will handle rewriting any uses of the [SpillLike] and [ReloadLike] ops
/// such that they are not present after the transformation, in conjunction with an implementation
/// of the [TransformSpillsInterface].
pub trait ReloadLike {
    /// Returns the operand corresponding to the spilled value
    fn spilled(&self) -> OpOperand;
    /// Returns a reference to the spilled value
    fn spilled_value(&self) -> ValueRef {
        self.spilled().borrow().as_value_ref()
    }
    /// Returns the value representing this unique reload of the spilled value
    ///
    /// Generally, this always corresponds to this op's result
    fn reloaded(&self) -> ValueRef;
}

/// This transformation rewrites `op` by applying the results of the provided [SpillAnalysis],
/// using the provided implementation of the [TransformSpillsInterface].
///
/// In effect, it performs the following steps:
///
/// * Splits all control flow edges that need to carry spills/reloads
/// * Inserts all spills and reloads at their computed locations
/// * Rewrites `op` such that all uses of a spilled value dominated by a reload are rewritten to
///   use that reload, or, where a dominance frontier is crossed, a materialized block
///   argument/phi representing the closest definition of that value from each predecessor
/// * Rewrites all spill and reload instructions to their primitive memory store/load ops
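///
/// A minimal usage sketch (hedged; `function_op`, `spill_analysis`, and `MyDialectSpills` are
/// hypothetical stand-ins for a root op, a previously computed [SpillAnalysis], and a
/// dialect-specific [TransformSpillsInterface] implementation):
///
/// ```rust,ignore
/// let mut iface = MyDialectSpills::default();
/// let status = transform_spills(function_op, &mut spill_analysis, &mut iface, analysis_manager)?;
/// // On success, all spills/reloads have been materialized and lowered to memory ops.
/// assert!(matches!(status, PostPassStatus::Changed));
/// ```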
pub fn transform_spills(
    op: OperationRef,
    analysis: &mut SpillAnalysis,
    interface: &mut dyn TransformSpillsInterface,
    analysis_manager: AnalysisManager,
) -> Result<PostPassStatus, Report> {
    assert!(
        op.borrow().implements::<dyn SingleRegion>(),
        "the spills transformation is not supported when the root op is multi-region"
    );

    let mut builder = OpBuilder::new(op.borrow().context_rc());

    log::debug!(target: "insert-spills", "analysis determined that some spills were required");
    log::debug!(target: "insert-spills", "    edges to split = {}", analysis.splits().len());
    log::debug!(target: "insert-spills", "    values spilled = {}", analysis.spills().len());
    log::debug!(target: "insert-spills", "    reloads issued = {}", analysis.reloads().len());

    // Split all edges along which spills/reloads are required
    for split_info in analysis.splits_mut() {
        log::trace!(target: "insert-spills", "splitting control flow edge {} -> {}", match split_info.predecessor {
            Predecessor::Parent => ProgramPoint::before(split_info.predecessor.operation(split_info.point)),
            Predecessor::Block { op, .. } | Predecessor::Region(op) => ProgramPoint::at_end_of(op.parent().unwrap()),
        }, split_info.point);

        let predecessor_block = split_info
            .predecessor
            .block()
            .unwrap_or_else(|| todo!("implement support for splits following a region branch op"));
        let predecessor_region = predecessor_block.parent().unwrap();

        // Create the split and switch the insertion point to the end of it
        let split = builder.create_block(predecessor_region, Some(predecessor_block), &[]);
        log::trace!(target: "insert-spills", "created {split} to hold contents of split edge");

        // Record the block we created for this split
        split_info.split = Some(split);

        // Rewrite the terminator in the predecessor so that it transfers control to the
        // original successor via `split`, moving any block arguments into the unconditional
        // branch that terminates `split`.
        match split_info.predecessor {
            Predecessor::Block { mut op, index } => {
                log::trace!(target: "insert-spills", "redirecting {predecessor_block} to {split}");
                let mut op = op.borrow_mut();
                let mut succ = op.successor_mut(index as usize);
                let prev_dest = succ.dest.parent().unwrap();
                succ.dest.borrow_mut().set(split);
                log::trace!(target: "insert-spills", "creating edge from {split} to {prev_dest}");
                let arguments = succ
                    .arguments
                    .take()
                    .into_iter()
                    .map(|mut operand| {
                        let mut operand = operand.borrow_mut();
                        let value = operand.as_value_ref();
                        // It is our responsibility to unlink the operands we removed from `succ`
                        operand.unlink();
                        value
                    })
                    .collect::<SmallVec<[_; 4]>>();
                match split_info.point {
                    ProgramPoint::Block { block, .. } => {
                        assert_eq!(
                            prev_dest, block,
                            "unexpected mismatch between predecessor target and successor block"
                        );
                        interface.create_unconditional_branch(
                            &mut builder,
                            block,
                            &arguments,
                            op.span(),
                        )?;
                    }
                    point => panic!(
                        "unexpected program point for split: unstructured control flow requires a \
                         block entry, got {point}"
                    ),
                }
            }
            Predecessor::Region(predecessor) => {
                log::trace!(target: "insert-spills", "splitting region control flow edge to {} from {predecessor}", split_info.point);
                todo!()
            }
            Predecessor::Parent => unimplemented!(
                "support for splits on exit from region branch ops is not yet implemented"
            ),
        }
    }

    // Insert all spills
    for spill in analysis.spills.iter_mut() {
        let ip = match spill.place {
            Placement::Split(split) => {
                let split_block = analysis.splits[split.as_usize()]
                    .split
                    .expect("expected split to have been materialized");
                let terminator = split_block.borrow().terminator().unwrap();
                ProgramPoint::before(terminator)
            }
            Placement::At(ip) => ip,
        };
        log::trace!(target: "insert-spills", "inserting spill of {} at {ip}", spill.value);
        builder.set_insertion_point(ip);
        let inst = interface.create_spill(&mut builder, spill.value, spill.span)?;
        spill.inst = Some(inst);
    }

    // Insert all reloads
    for reload in analysis.reloads.iter_mut() {
        let ip = match reload.place {
            Placement::Split(split) => {
                let split_block = analysis.splits[split.as_usize()]
                    .split
                    .expect("expected split to have been materialized");
                let terminator = split_block.borrow().terminator().unwrap();
                ProgramPoint::before(terminator)
            }
            Placement::At(ip) => ip,
        };
        log::trace!(target: "insert-spills", "inserting reload of {} at {ip}", reload.value);
        builder.set_insertion_point(ip);
        let inst = interface.create_reload(&mut builder, reload.value, reload.span)?;
        reload.inst = Some(inst);
    }

    log::trace!(target: "insert-spills", "all spills and reloads inserted successfully");

    let dominfo = analysis_manager.get_analysis::<DominanceInfo>()?;

    let region = op.borrow().regions().front().as_pointer().unwrap();
    if region.borrow().has_one_block() {
        rewrite_single_block_spills(op, region, analysis, interface, analysis_manager)?;
    } else {
        rewrite_cfg_spills(
            builder.context_rc(),
            region,
            analysis,
            interface,
            &dominfo,
            analysis_manager,
        )?;
    }

    Ok(PostPassStatus::Changed)
}

fn rewrite_single_block_spills(
    op: OperationRef,
    region: RegionRef,
    analysis: &mut SpillAnalysis,
    interface: &mut dyn TransformSpillsInterface,
    _analysis_manager: AnalysisManager,
) -> Result<(), Report> {
    // In a flattened CFG with only structured control flow, no dominance tree is required.
    //
    // Instead, similar to a regular CFG, we walk the region graph in post-order, doing the
    // following:
    //
    // 1. If we encounter a use of a spilled value, we add it to a use list
    // 2. If we encounter a reloaded spill, we rewrite any uses found so far to use the reloaded
    //    value
    // 3. If we encounter a spill, then we clear the set of uses of that spill found so far and
    //    continue
    // 4. If we reach the top of a region's entry block, and the region has no predecessors other
    //    than the containing operation, then we do nothing but continue the traversal.
    // 5. If we reach the top of a region's entry block, and the region has multiple predecessors,
    //    then for each spilled value for which we have found at least one use, we must insert a
    //    new region argument representing the spilled value, and rewrite all uses to use that
    //    argument instead. For any dominating predecessors, the original spilled value is passed
    //    as the value of the new argument.
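    //
    // As a small worked example of steps 1 and 2 (schematic, not actual dialect syntax),
    // consider a single block containing:
    //
    //     v0 = ...
    //     spill v0
    //     ...
    //     v1 = reload v0
    //     call f(v0)
    //
    // Walking bottom-up, the use of v0 in `call f(v0)` is recorded first (step 1); upon
    // reaching the reload, that use is rewritten to `call f(v1)` (step 2), and use tracking
    // for v0 ends once its original definition is reached.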

    struct Node {
        block: BlockRef,
        cursor: Option<OperationRef>,
        is_first_visit: bool,
    }
    impl Node {
        pub fn new(block: BlockRef) -> Self {
            Self {
                block,
                cursor: block.borrow().body().back().as_pointer(),
                is_first_visit: true,
            }
        }

        pub fn current(&self) -> Option<OperationRef> {
            self.cursor
        }

        pub fn move_next(&mut self) -> Option<OperationRef> {
            let next = self.cursor.take()?;
            self.cursor = next.prev();
            Some(next)
        }
    }

    let mut block_states =
        FxHashMap::<BlockRef, SmallDenseMap<ValueRef, SmallSet<OpOperand, 4>, 4>>::default();
    let entry_block = region.borrow().entry_block_ref().unwrap();
    let mut block_q = VecDeque::from([Node::new(entry_block)]);

    while let Some(mut node) = block_q.pop_back() {
        let Some(operation) = node.current() else {
            // We've reached the top of the block; remove any uses of the block arguments, if they
            // were spilled, as they represent the original definitions of those values.
            let block = node.block.borrow();
            let used = block_states.entry(node.block).or_default();
            for arg in ValueRange::<2>::from(block.arguments()) {
                if analysis.is_spilled(&arg) {
                    used.remove(&arg);
                }
            }
            continue;
        };

        let op = operation.borrow();
        if let Some(branch) = op.as_trait::<dyn RegionBranchOpInterface>() {
            // Before we process this op, we need to visit all of its regions first, as rewriting
            // those regions might introduce new region arguments that we must rewrite here. So,
            // if this is our first visit to this op, we recursively visit its regions in postorder
            // first, and then mark the op as visited. The next time we visit this op, we will
            // skip this part, and proceed to handling uses/defs of spilled values at the op
            // entry/exit.
            if node.is_first_visit {
                node.is_first_visit = false;
                block_q.push_back(node);
                for region in Region::postorder_region_graph_for(branch).into_iter().rev() {
                    let region = region.borrow();
                    assert!(
                        region.has_one_block(),
                        "multi-block regions are not currently supported"
                    );
                    let entry = region.entry();
                    block_q.push_back(Node::new(entry.as_block_ref()));
                }
                continue;
            } else {
                // Process any uses in the entry regions of this op before proceeding
                for region in branch.get_successor_regions(RegionBranchPoint::Parent) {
                    let Some(region) = region.into_successor() else {
                        continue;
                    };

                    let region_entry = region.borrow().entry_block_ref().unwrap();
                    if let Some(uses) = block_states.remove(&region_entry) {
                        let parent_uses = block_states.entry(node.block).or_default();
                        for (spilled, users) in uses {
                            // TODO(pauls): If `users` is non-empty, and `region` has multiple
                            // predecessors, then we need to introduce a new region argument to
                            // represent the definition of each spilled value from those
                            // predecessors, and then rewrite the uses to use the new argument.
                            let parent_users = parent_uses.entry(spilled).or_default();
                            let merged = users.into_union(parent_users);
                            *parent_users = merged;
                        }
                    }
                }
            }
        }

        let used = block_states.entry(node.block).or_default();

        let reload_like = op.as_trait::<dyn ReloadLike>();
        let is_reload_like = reload_like.is_some();
        if let Some(reload_like) = reload_like {
            // We've found a reload of a spilled value; rewrite all uses of the spilled value
            // found so far to use the reload instead.
            let spilled = reload_like.spilled_value();
            let reloaded = reload_like.reloaded();

            if let Some(to_rewrite) = used.remove(&spilled) {
                debug_assert!(!to_rewrite.is_empty(), "expected empty use sets to be removed");

                for mut user in to_rewrite {
                    user.borrow_mut().set(reloaded);
                }
            } else {
                // This reload has no recorded uses so far; skip it here (unused reloads are
                // erased later when the spill/reload pseudo-instructions are rewritten), and
                // move to the next op
                node.move_next();
                continue;
            }
        }

        // Advance the cursor in this block
        node.move_next();

        // Remove any use tracking for spilled values defined by this op
        for result in ValueRange::<2>::from(op.results().all()) {
            if analysis.is_spilled(&result) {
                used.remove(&result);
                continue;
            }
        }

        // Record any uses of spilled values by this op, so long as the op is not reload-like
        if !is_reload_like {
            for operand in op.operands().iter().copied() {
                let value = operand.borrow().as_value_ref();
                if analysis.is_spilled(&value) {
                    used.entry(value).or_default().insert(operand);
                }
            }
        }
    }

    let context = { op.borrow().context_rc() };
    rewrite_spill_pseudo_instructions(context, analysis, interface, None)
}

fn rewrite_cfg_spills(
    context: Rc<Context>,
    region: RegionRef,
    analysis: &mut SpillAnalysis,
    interface: &mut dyn TransformSpillsInterface,
    dominfo: &DominanceInfo,
    _analysis_manager: AnalysisManager,
) -> Result<(), Report> {
    // At this point, we've potentially emitted spills/reloads, but these are not yet being
    // used to split the live ranges of the SSA values to which they apply. Our job now is
    // to walk the CFG bottom-up, finding uses of values for which we have issued reloads,
    // and then looking for the dominating definition (either original, or reload) that controls
    // each such use, rewriting the use with the SSA value corresponding to the reloaded value.
    //
    // This has the effect of "reconstructing" the SSA form - although in our case it is more
    // precise to say that we are fixing up the original program to reflect the live-range
    // splits that we have computed (and inserted pseudo-instructions for). In the original
    // paper, there actually were multiple definitions of reloaded SSA values, which is why
    // this phase is referred to as "reconstructing": it is intended to recover the SSA
    // property that is lost once multiple definitions are introduced.
    //
    //   * For each original definition of a spilled value `v`, get the new definitions of `v`
    //     (reloads) and the uses of `v`.
    //   * For each use of `v`, walk the dominance tree upwards until a definition of `v` is
    //     found that is responsible for that use. If an iterated dominance frontier is crossed,
    //     a block argument is inserted such that the appropriate definition from each predecessor
    //     is wired up to that block argument, which is then the definition of `v` responsible
    //     for subsequent uses. The predecessor instructions which branch to it are new uses
    //     which we visit in the same manner as described above. After visiting all uses, any
    //     definitions (reloads) which are dead will have no uses of the reloaded value, and can
    //     thus be eliminated.
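    //
    // As a schematic example: if a block B3 contains `use v0` and its dominator B1 contains
    // `v1 = reload v0`, the upward walk from the use stops at that reload and the use is
    // rewritten to `use v1`. If the walk instead crosses a block in the iterated dominance
    // frontier of the reloads of v0, a block argument is introduced there (see
    // `insert_required_phis` below) and that argument becomes the definition responsible for
    // the use.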

    // We consume the spill analysis in this pass, as it will no longer be valid after this
    let domtree = dominfo.dominance(region);
    let domf = DominanceFrontier::new(&domtree);

    // Make sure that any block in the iterated dominance frontier of a spilled value has
    // a new phi (block argument) inserted, if one is not already present. These must be in
    // the CFG before we search for dominating definitions.
    let inserted_phis = insert_required_phis(&context, analysis, &domf);

    // Traverse the CFG bottom-up, doing the following along the way:
    //
    // 0. Merge the "used" sets of each successor of the current block (see remaining steps for
    //    how the "used" set is computed for a block). NOTE: We elaborate in step 4 on how to
    //    handle computing the "used" set for a successor, from the "used" set at the start of
    //    the successor block.
    // 1. If we encounter a use of a spilled value, record the location of that use in the set
    //    of uses we're seeking a dominating definition for, i.e. the "used" set
    // 2. If we reach a definition for a value with uses in the "used" set:
    //   * If the definition is the original definition of the value, no action is needed, so we
    //     remove all uses of that value from the "used" set.
    //   * If the definition is a reload, rewrite all of the uses in the "used" set to use the
    //     reload instead, removing them from the "used" set. Mark the reload used.
    // 3. When we reach the start of the block, the state of the "used" set is associated with
    //    the current block. This will be used as the starting state of the "used" set in each
    //    predecessor of the block
    // 4. When computing the "used" set in the predecessor (i.e. step 0), we also check whether
    //    a given successor is in the iterated dominance frontier for any values in the "used"
    //    set of that successor. If so, we need to insert a block parameter for each such value,
    //    rewrite all uses of that value to use the new block parameter, and add the "used"
    //    value as an additional argument to that successor. The resulting "used" set will thus
    //    retain a single entry for each of the values for which uses were rewritten
    //    (corresponding to the block arguments for the successor), but all of the uses
    //    dominated by the introduced block parameter are no longer in the set, as their
    //    dominating definition has been found. Any values in the "used" set for which the
    //    successor is not in the iterated dominance frontier for that value are retained in
    //    the "used" set without any changes.
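    //
    // Schematic example of steps 0-3: if successors B2 and B3 each recorded an unsatisfied
    // use of a spilled value v0, then when visiting their common predecessor B1 we start from
    // the union of those two use sets (step 0). Walking B1 bottom-up, encountering
    // `v1 = reload v0` rewrites all of those uses to v1 and drops them from the set (step 2);
    // whatever remains unsatisfied at the top of B1 becomes B1's own "used" set, to be
    // consumed by B1's predecessors in turn (step 3).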
    let mut used_sets =
        SmallDenseMap::<BlockRef, SmallDenseMap<ValueRef, SmallSet<OpOperand, 8>, 8>, 8>::default();
    let mut block_q = VecDeque::from(domtree.postorder());
    while let Some(node) = block_q.pop_front() {
        let Some(block_ref) = node.block() else {
            continue;
        };

        // Compute the initial "used" set for this block
        let mut used = SmallDenseMap::<ValueRef, SmallSet<OpOperand, 8>, 8>::default();
        for succ in Rc::<DomTreeNode>::children(node) {
            let Some(succ_block) = succ.block() else {
                continue;
            };

            if let Some(usages) = used_sets.get_mut(&succ_block) {
                // Union the used set from this successor with the others
                for (value, users) in usages.iter() {
                    used.entry(*value).or_default().extend(users.iter().copied());
                }
            }
        }

        // Traverse the block bottom-up, recording uses of spilled values while looking for
        // definitions
        let block = block_ref.borrow();
        for op in block.body().iter().rev() {
            find_inst_uses(&op, &mut used, analysis);
        }

        // At the top of the block, if any of the block parameters are in the "used" set, remove
        // those uses, as the block parameters are the original definitions for those values, and
        // thus no rewrite is needed.
        for arg in ValueRange::<2>::from(block.arguments()) {
            used.remove(&arg);
        }

        rewrite_inserted_phi_uses(&inserted_phis, block_ref, &mut used);

        // What remains are the unsatisfied uses of spilled values for this block and its
        // successors
        used_sets.insert(block_ref, used);
    }

    rewrite_spill_pseudo_instructions(context, analysis, interface, Some(dominfo))
}

/// Insert additional phi nodes as follows:
///
/// 1. For each spilled value V
/// 2. Obtain the set of blocks, R, containing a reload of V
/// 3. For each block B in the iterated dominance frontier of R, insert a phi (block argument) in B
///    for V
/// 4. For every predecessor of B, append V to the arguments passed to B, satisfying the new block
///    argument initially with the original value
/// 5. Traverse the CFG bottom-up, finding uses of V, until we reach an inserted phi, a reload, or
///    the original definition. Rewrite all found uses of V up to that point to use this
///    definition.
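///
/// As a schematic example (not actual dialect syntax): if `v0` is reloaded in blocks B1 and B2,
/// both of which branch to B3, then B3 is in the iterated dominance frontier of {B1, B2}, and a
/// new block argument is appended to B3:
///
/// ```text
/// B1: v1 = reload v0; br B3(v0)   // each predecessor initially passes the original v0 ...
/// B2: v2 = reload v0; br B3(v0)   // ... and these new uses are rewritten by the later pass
/// B3(v3):                         // inserted phi/block argument for v0
///     use v3                      // uses of v0 dominated by B3 are rewritten to use v3
/// ```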
fn insert_required_phis(
    context: &Context,
    analysis: &SpillAnalysis,
    domf: &DominanceFrontier,
) -> SmallDenseMap<BlockRef, SmallDenseMap<ValueRef, ValueRef, 8>, 8> {
    use midenc_hir::adt::smallmap::Entry;

    let mut required_phis = SmallDenseMap::<ValueRef, SmallSet<BlockRef, 2>, 4>::default();
    for reload in analysis.reloads() {
        let block = reload.inst.unwrap().parent().unwrap();
        let r = required_phis.entry(reload.value).or_default();
        r.insert(block);
    }

    let mut inserted_phis =
        SmallDenseMap::<BlockRef, SmallDenseMap<ValueRef, ValueRef, 8>, 8>::default();
    for (value, domf_r) in required_phis {
        // Compute the iterated dominance frontier, DF+(R)
        let idf_r = domf.iterate_all(domf_r);
        // Add phi to each B in DF+(R)
        let (ty, span) = {
            let value = value.borrow();
            (value.ty().clone(), value.span())
        };
        for mut b in idf_r {
            // Allocate new block parameter/phi, if one is not already present
            let phis = inserted_phis.entry(b).or_default();
            if let Entry::Vacant(entry) = phis.entry(value) {
                let phi = context.append_block_argument(b, ty.clone(), span);
                entry.insert(phi);

                // Append `value` as a new argument from each predecessor to satisfy the new parameter
                let block = b.borrow_mut();
                let mut next_use = block.uses().front().as_pointer();
                while let Some(pred) = next_use.take() {
                    next_use = pred.next();

                    let (mut predecessor, successor_index) = {
                        let pred = pred.borrow();
                        (pred.owner, pred.index as usize)
                    };
                    let operand = context.make_operand(value, predecessor, 0);
                    predecessor.borrow_mut().successor_mut(successor_index).arguments.push(operand);
                }
            }
        }
    }

    inserted_phis
}

fn find_inst_uses(
    op: &Operation,
    used: &mut SmallDenseMap<ValueRef, SmallSet<OpOperand, 8>, 8>,
    analysis: &SpillAnalysis,
) {
    let reload_like = op.as_trait::<dyn ReloadLike>();
    let is_reload = reload_like.is_some();
    if let Some(reload_like) = reload_like {
        // We have found a new definition for a spilled value, so we must rewrite all uses of
        // the spilled value found so far with the reloaded value.
        let spilled = reload_like.spilled_value();
        let reloaded = reload_like.reloaded();

        if let Some(to_rewrite) = used.remove(&spilled) {
            debug_assert!(!to_rewrite.is_empty(), "expected empty use sets to be removed");

            for mut user in to_rewrite {
                user.borrow_mut().set(reloaded);
            }
        } else {
            // This reload has no recorded uses so far; skip it here (unused reloads are erased
            // later when the spill/reload pseudo-instructions are rewritten)
            return;
        }
    }

    for result in ValueRange::<2>::from(op.results().all()) {
        if analysis.is_spilled(&result) {
            // This op is the original definition for a spilled value, so remove any
            // uses of it we've accumulated so far, as they do not need to be rewritten
            used.remove(&result);
        }
    }

    // Record any uses of spilled values in the argument list for `op`, but ignore reload-likes
    if !is_reload {
        for operand in op.operands().iter().copied() {
            let value = operand.borrow().as_value_ref();
            if analysis.is_spilled(&value) {
                used.entry(value).or_default().insert(operand);
            }
        }
    }
}

fn rewrite_inserted_phi_uses(
    inserted_phis: &SmallDenseMap<BlockRef, SmallDenseMap<ValueRef, ValueRef, 8>, 8>,
    block_ref: BlockRef,
    used: &mut SmallDenseMap<ValueRef, SmallSet<OpOperand, 8>, 8>,
) {
    // If we have inserted any phis in this block, rewrite uses of the spilled values they
    // represent.
    if let Some(phis) = inserted_phis.get(&block_ref) {
        for (spilled, phi) in phis.iter() {
            if let Some(to_rewrite) = used.remove(spilled) {
                debug_assert!(!to_rewrite.is_empty(), "expected empty use sets to be removed");

                for mut user in to_rewrite {
                    user.borrow_mut().set(*phi);
                }
            } else {
                // TODO(pauls): This phi is unused, we should be able to remove it
                log::warn!(target: "insert-spills", "unused phi {phi} encountered during rewrite phase");
                continue;
            }
        }
    }
}

/// For each spilled value, allocate a procedure local and rewrite the spill instruction as a
/// `local.store`; if the spill is dead, remove it entirely instead.
///
/// Dead spills can occur because the spill analysis must place spills conservatively: to
/// ensure that a value spilled along at least one path to a block is spilled along all paths
/// into that block, extra spills are inserted on those edges where a spill has not yet
/// occurred.
///
/// However, this produces dead spills on some paths through the function, which are not
/// needed once rewrites have been performed. We eliminate these by identifying spills which
/// do not dominate any reload - if a store to a spill slot can never be read, then the store
/// can be elided.
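///
/// Schematically (a hedged sketch; block names are illustrative, and B3/B4 are assumed to have
/// only the predecessor shown):
///
/// ```text
/// B1: spill v0; br B3     // dominates the reload in B3: lowered to a store
/// B2: spill v0; br B4     // dominates no reload: dead, erased instead of lowered
/// B3: v1 = reload v0; ...
/// B4: return ...
/// ```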
fn rewrite_spill_pseudo_instructions(
    context: Rc<Context>,
    analysis: &mut SpillAnalysis,
    interface: &mut dyn TransformSpillsInterface,
    dominfo: Option<&DominanceInfo>,
) -> Result<(), Report> {
    use midenc_hir::{
        dominance::Dominates,
        patterns::{NoopRewriterListener, RewriterImpl},
    };

    let mut builder = RewriterImpl::<NoopRewriterListener>::new(context);
    for spill in analysis.spills() {
        let operation = spill.inst.expect("expected spill to have been materialized");
        let op = operation.borrow();
        let spill_like = op
            .as_trait::<dyn SpillLike>()
            .expect("expected materialized spill operation to implement SpillLike");
        // The current SSA value representing the original spilled value at this point in the
        // program
        let stored = spill_like.spilled_value();
        // This spill is used if it properly dominates any reload of `stored` (and all uses should
        // be reloads)
        //
        // If we have no dominance info, the spill is presumed used
        let is_used = dominfo.is_none_or(|dominfo| {
            stored.borrow().uses().iter().any(|user| {
                let used_by = user.owner.borrow();
                debug_assert!(
                    user.owner == operation || used_by.implements::<dyn ReloadLike>(),
                    "unexpected non-reload use of spilled value"
                );
                op.dominates(&used_by, dominfo)
            })
        });
        drop(op);

        if is_used {
            builder.set_insertion_point_after(operation);
            interface.convert_spill_to_store(&mut builder, operation)?;
        } else {
            builder.erase_op(operation);
        }
    }

    // Rewrite all used reload instructions as `local.load` instructions from the corresponding
    // procedure local
    for reload in analysis.reloads() {
        let operation = reload.inst.expect("expected reload to have been materialized");
        let op = operation.borrow();
        let reload_like = op
            .as_trait::<dyn ReloadLike>()
            .expect("expected materialized reload op to implement ReloadLike");
        let is_used = reload_like.reloaded().borrow().is_used();
        drop(op);

        // Avoid emitting loads for unused reloads
        if is_used {
            builder.set_insertion_point_after(operation);
            interface.convert_reload_to_load(&mut builder, operation)?;
        } else {
            builder.erase_op(operation);
        }
    }

    Ok(())
}