use std::collections::HashMap;
use crate::analysis::{Analyzer, DataflowAnalyzer, Direction, Edge, StatementLocation};
use crate::{Block, BlockEnd, BlockId, Lowered, MatchInfo, Statement, VarRemapping, VarUsage};
/// Driver for a backwards analysis over the block graph of a lowered function.
///
/// Walks the blocks in reverse topological order (successors before
/// predecessors) and caches the analyzer's per-block info.
pub struct BackAnalysis<'db, 'a, TAnalyzer: Analyzer<'db, 'a>> {
    /// The lowered function being analyzed.
    lowered: &'a Lowered<'db>,
    /// The user-supplied analyzer that computes and combines per-block info.
    pub analyzer: TAnalyzer,
    /// Cached info per block: the analyzer state at the start of that block.
    /// Entries for match-arm target blocks are consumed (removed) when their
    /// match is merged; entries for goto targets are cloned instead.
    block_info: HashMap<BlockId, TAnalyzer::Info>,
}
impl<'db, 'a, TAnalyzer: Analyzer<'db, 'a>> BackAnalysis<'db, 'a, TAnalyzer> {
    /// Creates a backwards analysis over `lowered` driven by `analyzer`.
    pub fn new(lowered: &'a Lowered<'db>, analyzer: TAnalyzer) -> Self {
        Self { lowered, analyzer, block_info: Default::default() }
    }

    /// Runs the analysis and returns the info computed for the root block.
    ///
    /// Uses an explicit DFS stack instead of recursion: a block is popped and
    /// processed only once every block it jumps to already has cached info.
    pub fn get_root_info(&mut self) -> TAnalyzer::Info {
        let mut pending = vec![BlockId::root()];
        while let Some(&current) = pending.last() {
            let end = &self.lowered.blocks[current].end;
            // If any successor is still missing, it was pushed above `current`
            // and will be processed first; otherwise `current` is ready.
            if self.add_missing_dependency_blocks(&mut pending, end) {
                continue;
            }
            pending.pop();
            self.calc_block_info(current);
        }
        // The root's entry is consumed rather than cloned.
        self.block_info.remove(&BlockId::root()).unwrap()
    }

    /// Computes and caches the analyzer info at the start of `block_id`.
    /// Assumes the infos of all successor blocks are already cached.
    fn calc_block_info(&mut self, block_id: BlockId) {
        let block = &self.lowered.blocks[block_id];
        let mut info = self.get_end_info(block_id);
        // Walk the statements backwards, threading `info` through each one.
        for (idx, stmt) in block.statements.iter().enumerate().rev() {
            self.analyzer.visit_stmt(&mut info, (block_id, idx), stmt);
        }
        self.analyzer.visit_block_start(&mut info, block_id, block);
        self.block_info.insert(block_id, info);
    }

    /// Pushes onto `dfs_stack` every successor of `block_end` whose info is
    /// not yet cached. Returns `true` if anything was pushed, i.e. the current
    /// block cannot be processed yet.
    fn add_missing_dependency_blocks(
        &self,
        dfs_stack: &mut Vec<BlockId>,
        block_end: &'a BlockEnd<'_>,
    ) -> bool {
        match block_end {
            BlockEnd::NotSet => unreachable!(),
            // Terminal blocks have no successors.
            BlockEnd::Return(..) | BlockEnd::Panic(_) => false,
            BlockEnd::Goto(target, _) => {
                let missing = !self.block_info.contains_key(target);
                if missing {
                    dfs_stack.push(*target);
                }
                missing
            }
            BlockEnd::Match { info } => {
                let mut pushed_any = false;
                for arm in info.arms() {
                    if !self.block_info.contains_key(&arm.block_id) {
                        dfs_stack.push(arm.block_id);
                        pushed_any = true;
                    }
                }
                pushed_any
            }
        }
    }

    /// Computes the analyzer info just before the terminator of `block_id`,
    /// based on the cached infos of its successors.
    fn get_end_info(&mut self, block_id: BlockId) -> TAnalyzer::Info {
        let block_end = &self.lowered.blocks[block_id].end;
        // The terminator's "statement location" is one past the last statement.
        let statement_location = (block_id, self.lowered.blocks[block_id].statements.len());
        match block_end {
            BlockEnd::NotSet => unreachable!(),
            BlockEnd::Goto(target_block_id, remapping) => {
                // Goto targets keep their cached entry (cloned, not removed).
                let mut info = self.block_info[target_block_id].clone();
                self.analyzer.visit_goto(
                    &mut info,
                    statement_location,
                    *target_block_id,
                    remapping,
                );
                info
            }
            BlockEnd::Return(vars, _location) => {
                self.analyzer.info_from_return(statement_location, vars)
            }
            BlockEnd::Panic(data) => self.analyzer.info_from_panic(statement_location, data),
            BlockEnd::Match { info } => {
                // Arm infos are consumed (removed from the cache), as they are
                // not needed again after this merge.
                let arm_infos =
                    info.arms().iter().map(|arm| self.block_info.remove(&arm.block_id).unwrap());
                self.analyzer.merge_match(statement_location, info, arm_infos)
            }
        }
    }
}
/// Runs a backwards dataflow analysis by adapting a [`DataflowAnalyzer`] to
/// the [`Analyzer`]-based [`BackAnalysis`] machinery.
pub struct DataflowBackAnalysis<'db, 'a, TAnalyzer: DataflowAnalyzer<'db, 'a>> {
    /// The underlying back-analysis driving the adapted analyzer.
    inner: BackAnalysis<'db, 'a, AnalyzerAdapter<'db, 'a, TAnalyzer>>,
}
impl<'db, 'a, TAnalyzer: DataflowAnalyzer<'db, 'a>> DataflowBackAnalysis<'db, 'a, TAnalyzer> {
    /// Wraps `analyzer` in an [`AnalyzerAdapter`] and sets up the underlying
    /// [`BackAnalysis`].
    ///
    /// # Panics
    /// Panics if the analyzer's declared direction is not [`Direction::Backward`].
    pub fn new(lowered: &'a Lowered<'db>, analyzer: &'a mut TAnalyzer) -> Self {
        assert!(
            TAnalyzer::DIRECTION == Direction::Backward,
            "DataflowBackAnalysis requires a backward analyzer"
        );
        Self { inner: BackAnalysis::new(lowered, AnalyzerAdapter { analyzer, lowered }) }
    }

    /// Runs the analysis to completion and returns the root block's info.
    pub fn run(mut self) -> TAnalyzer::Info {
        self.inner.get_root_info()
    }
}
/// Adapts a [`DataflowAnalyzer`] to the [`Analyzer`] trait so it can be driven
/// by [`BackAnalysis`].
pub struct AnalyzerAdapter<'db, 'a, TAnalyzer: DataflowAnalyzer<'db, 'a>> {
    /// The wrapped dataflow analyzer.
    pub analyzer: &'a mut TAnalyzer,
    /// The lowered function being analyzed.
    lowered: &'a Lowered<'db>,
}
impl<'db, 'a, TAnalyzer: DataflowAnalyzer<'db, 'a>> Analyzer<'db, 'a>
    for AnalyzerAdapter<'db, 'a, TAnalyzer>
{
    type Info = TAnalyzer::Info;

    fn visit_block_start(&mut self, info: &mut Self::Info, block_id: BlockId, _block: &Block<'db>) {
        // NOTE(review): the block is re-fetched from `lowered` instead of
        // using `_block` — presumably to obtain the `'a` lifetime the
        // dataflow analyzer needs; confirm before simplifying.
        let block = &self.lowered.blocks[block_id];
        self.analyzer.transfer_block(info, block_id, block);
        self.analyzer.visit_block_start(info, block_id, block);
    }

    /// Per-statement handling is a no-op here: the whole block is processed at
    /// once via `transfer_block` in `visit_block_start`.
    fn visit_stmt(
        &mut self,
        _info: &mut Self::Info,
        _statement_location: StatementLocation,
        _stmt: &'a Statement<'db>,
    ) {
    }

    fn visit_goto(
        &mut self,
        info: &mut Self::Info,
        _statement_location: StatementLocation,
        target_block_id: BlockId,
        remapping: &'a VarRemapping<'db>,
    ) {
        // Push the successor's info backwards through the goto edge.
        let goto_edge = Edge::Goto { target: target_block_id, remapping };
        *info = self.analyzer.transfer_edge(info, &goto_edge);
    }

    fn merge_match(
        &mut self,
        statement_location: StatementLocation,
        match_info: &'a MatchInfo<'db>,
        infos: impl Iterator<Item = Self::Info>,
    ) -> Self::Info {
        let mut arms_with_infos = match_info.arms().iter().zip(infos);
        // Applies the edge transfer function for a single match arm.
        // Takes the analyzer as an argument so the closure does not hold a
        // long-lived `&mut` borrow of `self.analyzer`.
        let apply_edge = |analyzer: &mut TAnalyzer, info: Self::Info, arm| {
            analyzer.transfer_edge(&info, &Edge::MatchArm { arm, match_info })
        };
        let mut merged = match arms_with_infos.next() {
            Some((arm, info)) => apply_edge(self.analyzer, info, arm),
            // A match with no arms: fall back to the initial info at this
            // block's end.
            None => {
                return self.analyzer.initial_info(
                    statement_location.0,
                    &self.lowered.blocks[statement_location.0].end,
                );
            }
        };
        // Transfer each remaining arm's info through its edge, then merge it
        // into the accumulator (same interleaved order as a fold).
        for (arm, info) in arms_with_infos {
            let transferred = apply_edge(self.analyzer, info, arm);
            merged = self.analyzer.merge(self.lowered, statement_location, merged, transferred);
        }
        merged
    }

    fn info_from_return(
        &mut self,
        statement_location: StatementLocation,
        vars: &'a [VarUsage<'db>],
    ) -> Self::Info {
        let (block_id, _) = statement_location;
        let block_end = &self.lowered.blocks[block_id].end;
        // Recover the return's location from the block end itself.
        let BlockEnd::Return(_, location) = block_end else {
            unreachable!("Unexpected block end type")
        };
        let location = *location;
        let initial = self.analyzer.initial_info(block_id, block_end);
        self.analyzer.transfer_edge(&initial, &Edge::Return { vars, location })
    }

    fn info_from_panic(
        &mut self,
        statement_location: StatementLocation,
        var: &VarUsage<'db>,
    ) -> Self::Info {
        let (block_id, _) = statement_location;
        let initial = self.analyzer.initial_info(block_id, &self.lowered.blocks[block_id].end);
        self.analyzer.transfer_edge(&initial, &Edge::Panic { var: *var })
    }
}