use crate::cfg::Cfg;
use crate::labels::{Cap, DataLabel, SourceKind};
use crate::ssa::{SsaBody, SsaOp, SsaValue};
use crate::summary::GlobalSummaries;
use crate::symbol::{FuncKey, Lang};
use crate::taint::Finding;
use crate::taint::ssa_transfer::CalleeSsaBody;
use petgraph::graph::NodeIndex;
use smallvec::SmallVec;
use std::collections::{HashMap, HashSet};
/// Default maximum interprocedural depth when descending into callee bodies.
pub const DEFAULT_BACKWARDS_DEPTH: u32 = 2;
/// Maximum number of SSA values a single backwards walk may touch before it
/// records a budget-exhausted flow and stops.
pub const BACKWARDS_VALUE_BUDGET: u32 = 1024;
/// Callee bodies with more SSA blocks than this are skipped during
/// interprocedural resolution.
pub const MAX_BACKWARDS_CALLEE_BLOCKS: usize = 500;
/// Demand carried along a backwards walk: the sink capabilities being matched
/// plus bookkeeping about validation and interprocedural depth.
#[derive(Clone, Debug, Default)]
pub struct DemandState {
    /// Capabilities demanded at the sink; matched against `DataLabel::Source` caps.
    pub caps: Cap,
    /// Validation counter for the true branch.
    /// NOTE(review): never updated in this file — presumably maintained by
    /// other transfer logic; confirm before relying on it.
    pub validated_true: u8,
    /// Validation counter for the false branch; same caveat as `validated_true`.
    pub validated_false: u8,
    /// Interprocedural depth, incremented each time the walk enters a callee body.
    pub depth: u32,
}
impl DemandState {
pub fn new(caps: Cap) -> Self {
Self {
caps,
validated_true: 0,
validated_false: 0,
depth: 0,
}
}
}
/// One terminated backwards walk starting at a sink value.
#[derive(Clone, Debug)]
pub struct BackwardFlow {
    /// SSA value at the sink where the walk started.
    pub sink_value: SsaValue,
    /// CFG node of the sink.
    pub sink_node: NodeIndex,
    /// Capabilities demanded at the sink.
    pub sink_caps: Cap,
    /// Classified source kind when the walk reached a matching source.
    pub source_kind: Option<SourceKind>,
    /// CFG node the walk terminated at (source or parameter), if any.
    pub source_node: Option<NodeIndex>,
    /// Set when the path was proven infeasible.
    /// NOTE(review): never set to `true` in this file — presumably produced
    /// by other analysis stages; confirm.
    pub infeasible: bool,
    /// Set when the walk hit `BACKWARDS_VALUE_BUDGET` before terminating.
    pub budget_exhausted: bool,
    /// Interprocedural depth reached when this flow was emitted.
    pub max_depth: u32,
    /// Sink-to-source value chain, clipped to `MAX_CHAIN_LEN` entries.
    pub chain: SmallVec<[SsaValue; 8]>,
}
impl BackwardFlow {
    /// A flow confirms a finding only when a source was actually reached and
    /// the walk ended cleanly — neither pruned as infeasible nor cut off by
    /// the value budget.
    pub fn is_confirmation(&self) -> bool {
        if self.infeasible || self.budget_exhausted {
            return false;
        }
        self.source_kind.is_some()
    }
}
/// Longest value chain recorded on a flow; longer chains are clipped to the
/// head (sink) plus the most recent tail values.
pub const MAX_CHAIN_LEN: usize = 16;
/// Read-only context for one backwards analysis run.
pub struct BackwardsCtx<'a> {
    /// SSA body of the function containing the sink.
    pub ssa: &'a SsaBody,
    /// CFG whose nodes carry taint labels and call information.
    pub cfg: &'a Cfg,
    /// Source language of the analysed function.
    pub lang: Lang,
    /// Cross-file summaries used to resolve callees, when available.
    pub global_summaries: Option<&'a GlobalSummaries>,
    /// Bodies of other functions in the same file, for intra-file descent.
    pub intra_file_bodies: Option<&'a HashMap<FuncKey, CalleeSsaBody>>,
    /// Maximum interprocedural depth for callee resolution.
    pub depth_budget: u32,
}
impl<'a> BackwardsCtx<'a> {
pub fn new(ssa: &'a SsaBody, cfg: &'a Cfg, lang: Lang) -> Self {
Self {
ssa,
cfg,
lang,
global_summaries: None,
intra_file_bodies: None,
depth_budget: DEFAULT_BACKWARDS_DEPTH,
}
}
}
pub fn backward_transfer(
ssa: &SsaBody,
value: SsaValue,
demand: &DemandState,
) -> (BackwardStep, SmallVec<[(SsaValue, DemandState); 4]>) {
let def = ssa.def_of(value);
let block = &ssa.blocks[def.block.0 as usize];
let op = block
.phis
.iter()
.chain(block.body.iter())
.find(|i| i.value == value)
.map(|i| &i.op);
let op = match op {
Some(o) => o,
None => return (BackwardStep::Unknown, SmallVec::new()),
};
match op {
SsaOp::Source => (BackwardStep::ReachedSource(def.cfg_node), SmallVec::new()),
SsaOp::Const(_) => (BackwardStep::ReachedConst, SmallVec::new()),
SsaOp::Param { index } => (
BackwardStep::ReachedParam {
index: *index,
node: def.cfg_node,
},
SmallVec::new(),
),
SsaOp::SelfParam => (
BackwardStep::ReachedParam {
index: 0,
node: def.cfg_node,
},
SmallVec::new(),
),
SsaOp::CatchParam => (BackwardStep::ReachedCatchParam, SmallVec::new()),
SsaOp::Nop => (BackwardStep::Unknown, SmallVec::new()),
SsaOp::Undef => (BackwardStep::ReachedConst, SmallVec::new()),
SsaOp::Phi(operands) => {
let mut next: SmallVec<[(SsaValue, DemandState); 4]> = SmallVec::new();
for (_pred_block, pred_value) in operands {
next.push((*pred_value, demand.clone()));
}
(BackwardStep::Phi, next)
}
SsaOp::Assign(operands) => {
let mut next: SmallVec<[(SsaValue, DemandState); 4]> = SmallVec::new();
for op in operands {
next.push((*op, demand.clone()));
}
(BackwardStep::Assign, next)
}
SsaOp::Call {
callee,
args,
receiver,
..
} => {
let mut flat: SmallVec<[(SsaValue, DemandState); 4]> = SmallVec::new();
if let Some(r) = receiver {
flat.push((*r, demand.clone()));
}
for arg_uses in args {
for u in arg_uses {
flat.push((*u, demand.clone()));
}
}
(
BackwardStep::Call {
callee: callee.clone(),
},
flat,
)
}
SsaOp::FieldProj { receiver, .. } => {
let mut next: SmallVec<[(SsaValue, DemandState); 4]> = SmallVec::new();
next.push((*receiver, demand.clone()));
(BackwardStep::Assign, next)
}
}
}
/// Result of one backwards transfer step over a value's defining operation.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum BackwardStep {
    /// Definition is a taint source at the given CFG node.
    ReachedSource(NodeIndex),
    /// Definition is a constant or `undef`; the branch carries no taint.
    ReachedConst,
    /// Definition is a function parameter (`self` is index 0).
    ReachedParam { index: usize, node: NodeIndex },
    /// Definition is an exception-handler parameter.
    ReachedCatchParam,
    /// Phi node: fan out to the per-predecessor operands.
    Phi,
    /// Assignment or field projection: fan out to the operands.
    Assign,
    /// Call: either descend into the callee or fan out to receiver/args.
    Call { callee: String },
    /// Opaque definition (missing instruction or `Nop`).
    Unknown,
}
/// Entry point: walks backwards from `sink_value`, collecting every flow
/// that terminates at a source, a parameter, or a budget cutoff.
pub fn analyse_sink_backwards(
    ctx: &BackwardsCtx<'_>,
    sink_value: SsaValue,
    sink_node: NodeIndex,
    sink_caps: Cap,
) -> Vec<BackwardFlow> {
    let mut flows = Vec::new();
    let mut seen: HashSet<SsaValue> = HashSet::new();
    let mut spent: u32 = 0;
    // The chain always starts at the sink value itself.
    let mut trail: SmallVec<[SsaValue; 8]> = SmallVec::new();
    trail.push(sink_value);
    let demand = DemandState::new(sink_caps);
    walk_dfs(
        ctx,
        sink_value,
        sink_node,
        sink_caps,
        &demand,
        &mut seen,
        &mut spent,
        &mut trail,
        &mut flows,
    );
    flows
}
/// Recursive DFS step of the backwards walk.
///
/// Walks from `value` towards its definitions, pushing a `BackwardFlow` into
/// `out` whenever the walk terminates (source reached, parameter reached, or
/// budget exhausted). `chain` is mutated push/pop-style so it always holds
/// the value path from the sink down to the current value; `clip_chain`
/// snapshots it at every emission. `visited` and `budget` are shared across
/// the entire walk of one function body.
#[allow(clippy::too_many_arguments)]
fn walk_dfs(
    ctx: &BackwardsCtx<'_>,
    value: SsaValue,
    sink_node: NodeIndex,
    sink_caps: Cap,
    demand: &DemandState,
    visited: &mut HashSet<SsaValue>,
    budget: &mut u32,
    chain: &mut SmallVec<[SsaValue; 8]>,
    out: &mut Vec<BackwardFlow>,
) {
    // Budget gate: once BACKWARDS_VALUE_BUDGET values have been charged,
    // record a budget-exhausted flow and stop exploring this branch.
    if *budget >= BACKWARDS_VALUE_BUDGET {
        out.push(BackwardFlow {
            // chain[0] is the original sink value pushed by the entry point.
            sink_value: chain.first().copied().unwrap_or(value),
            sink_node,
            sink_caps,
            source_kind: None,
            source_node: None,
            infeasible: false,
            budget_exhausted: true,
            max_depth: demand.depth,
            chain: clip_chain(chain),
        });
        return;
    }
    // Budget is charged before the visited check, so revisits consume budget.
    *budget += 1;
    if !visited.insert(value) {
        return;
    }
    // If the defining CFG node itself carries a Source label whose caps
    // overlap the sink's demand, confirm immediately without stepping further.
    let def_cfg_node = ctx.ssa.def_of(value).cfg_node;
    if def_cfg_node.index() < ctx.cfg.node_count() {
        let info = &ctx.cfg[def_cfg_node];
        let source_cap_match = info
            .taint
            .labels
            .iter()
            .any(|l| matches!(l, DataLabel::Source(c) if !(*c & sink_caps).is_empty()));
        if source_cap_match {
            let source_kind = classify_source_kind(ctx, def_cfg_node, sink_caps);
            out.push(BackwardFlow {
                sink_value: chain.first().copied().unwrap_or(value),
                sink_node,
                sink_caps,
                source_kind: Some(source_kind),
                source_node: Some(def_cfg_node),
                infeasible: false,
                budget_exhausted: false,
                max_depth: demand.depth,
                chain: clip_chain(chain),
            });
            return;
        }
    }
    let (step, next) = backward_transfer(ctx.ssa, value, demand);
    match step {
        // Terminal: the defining op is itself a source instruction.
        BackwardStep::ReachedSource(node) => {
            let source_kind = classify_source_kind(ctx, node, sink_caps);
            out.push(BackwardFlow {
                sink_value: chain.first().copied().unwrap_or(value),
                sink_node,
                sink_caps,
                source_kind: Some(source_kind),
                source_node: Some(node),
                infeasible: false,
                budget_exhausted: false,
                max_depth: demand.depth,
                chain: clip_chain(chain),
            });
        }
        // Constants carry no taint: the branch dies without emitting a flow.
        BackwardStep::ReachedConst => {
        }
        // A parameter is an inconclusive endpoint (source_kind: None): taint
        // may flow in from a caller we cannot see from here.
        BackwardStep::ReachedParam { index: _, node } => {
            out.push(BackwardFlow {
                sink_value: chain.first().copied().unwrap_or(value),
                sink_node,
                sink_caps,
                source_kind: None,
                source_node: Some(node),
                infeasible: false,
                budget_exhausted: false,
                max_depth: demand.depth,
                chain: clip_chain(chain),
            });
        }
        // Catch parameters end the walk like constants: no flow emitted.
        BackwardStep::ReachedCatchParam => {
        }
        // Fan out: push each operand onto the chain, recurse, then pop.
        BackwardStep::Phi | BackwardStep::Assign => {
            for (operand, next_demand) in next {
                chain.push(operand);
                walk_dfs(
                    ctx,
                    operand,
                    sink_node,
                    sink_caps,
                    &next_demand,
                    visited,
                    budget,
                    chain,
                    out,
                );
                chain.pop();
            }
        }
        BackwardStep::Call { callee } => {
            // Try to descend into the callee's returned values, with a fresh
            // visited set and value budget scoped to the callee body.
            let resolved = resolve_callee_body(ctx, &callee, demand.depth);
            if let Some((callee_body, callee_key)) = resolved {
                let mut callee_visited: HashSet<SsaValue> = HashSet::new();
                let mut callee_budget: u32 = 0;
                let callee_ctx = BackwardsCtx {
                    ssa: &callee_body.ssa,
                    // NOTE(review): falls back to the caller's CFG when the
                    // callee has no body graph — node indices from the callee
                    // SSA may then refer to the wrong graph; confirm intended.
                    cfg: callee_body.body_graph.as_ref().unwrap_or(ctx.cfg),
                    lang: ctx.lang,
                    global_summaries: ctx.global_summaries,
                    intra_file_bodies: ctx.intra_file_bodies,
                    depth_budget: ctx.depth_budget,
                };
                let mut callee_demand = demand.clone();
                callee_demand.depth += 1;
                // Walk backwards from every returned value of the callee.
                for block in &callee_body.ssa.blocks {
                    if let crate::ssa::ir::Terminator::Return(Some(ret_val)) = &block.terminator {
                        walk_dfs(
                            &callee_ctx,
                            *ret_val,
                            sink_node,
                            sink_caps,
                            &callee_demand,
                            &mut callee_visited,
                            &mut callee_budget,
                            chain,
                            out,
                        );
                    }
                }
                let _ = callee_key;
                return;
            }
            // Unresolvable callee: conservatively walk the call's receiver
            // and arguments in the current function instead.
            for (operand, next_demand) in next {
                chain.push(operand);
                walk_dfs(
                    ctx,
                    operand,
                    sink_node,
                    sink_caps,
                    &next_demand,
                    visited,
                    budget,
                    chain,
                    out,
                );
                chain.pop();
            }
        }
        // Opaque definition: the branch is abandoned silently.
        BackwardStep::Unknown => {
        }
    }
}
/// Resolves `callee` to an SSA body for interprocedural descent.
///
/// Returns `None` when the depth budget is exhausted, when no body with a
/// matching leaf name is known, or when every candidate exceeds
/// `MAX_BACKWARDS_CALLEE_BLOCKS`. Matching is by unqualified leaf name only,
/// so the first same-named body found wins; intra-file bodies take precedence
/// over global summaries.
fn resolve_callee_body<'a>(
    ctx: &BackwardsCtx<'a>,
    callee: &str,
    current_depth: u32,
) -> Option<(&'a CalleeSsaBody, FuncKey)> {
    if current_depth >= ctx.depth_budget {
        return None;
    }
    // Strip `path::to::` and `receiver.` qualification down to the leaf name.
    // The previous `rsplit(..).next().unwrap_or(..)` chain was dead code
    // (`rsplit`'s first `next()` is always `Some`) and its second fallback
    // wrongly referenced the fully-qualified `callee`; `rfind` + slicing makes
    // the intent explicit.
    let tail = match callee.rfind("::") {
        Some(i) => &callee[i + 2..],
        None => callee,
    };
    let leaf = match tail.rfind('.') {
        Some(i) => &tail[i + 1..],
        None => tail,
    };
    if let Some(map) = ctx.intra_file_bodies {
        if let Some((key, body)) = map
            .iter()
            .find(|(key, body)| key.name == leaf && body.ssa.blocks.len() <= MAX_BACKWARDS_CALLEE_BLOCKS)
        {
            return Some((body, key.clone()));
        }
    }
    if let Some(map) = ctx.global_summaries.and_then(|gs| gs.bodies_by_key()) {
        if let Some((key, body)) = map
            .iter()
            .find(|(key, body)| key.name == leaf && body.ssa.blocks.len() <= MAX_BACKWARDS_CALLEE_BLOCKS)
        {
            return Some((body, key.clone()));
        }
    }
    None
}
fn classify_source_kind(ctx: &BackwardsCtx<'_>, node: NodeIndex, sink_caps: Cap) -> SourceKind {
if node.index() >= ctx.cfg.node_count() {
return SourceKind::Unknown;
}
let info = &ctx.cfg[node];
let caps_match = info
.taint
.labels
.iter()
.any(|l| matches!(l, DataLabel::Source(c) if !(*c & sink_caps).is_empty()));
if caps_match {
let callee_str = info.call.callee.as_deref().unwrap_or("");
crate::labels::infer_source_kind(sink_caps, callee_str)
} else {
SourceKind::Unknown
}
}
/// Caps a value chain at `MAX_CHAIN_LEN` entries by keeping the head (the
/// sink value) plus the last `MAX_CHAIN_LEN - 1` values.
fn clip_chain(chain: &SmallVec<[SsaValue; 8]>) -> SmallVec<[SsaValue; 8]> {
    let len = chain.len();
    if len <= MAX_CHAIN_LEN {
        return chain.clone();
    }
    let mut clipped: SmallVec<[SsaValue; 8]> = SmallVec::new();
    clipped.push(chain[0]);
    clipped.extend(chain.iter().copied().skip(len - (MAX_CHAIN_LEN - 1)));
    clipped
}
/// Cutoff note recorded when backwards analysis confirmed the finding.
pub const NOTE_CONFIRMED: &str = "backwards-confirmed";
/// Cutoff note recorded when every backwards flow was infeasible.
pub const NOTE_INFEASIBLE: &str = "backwards-infeasible";
/// Cutoff note recorded when the walk ran out of value budget.
pub const NOTE_BUDGET: &str = "backwards-budget-exhausted";
/// Aggregate verdict over all backwards flows of one finding.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FindingVerdict {
    /// At least one flow reached a matching source cleanly.
    Confirmed,
    /// No confirmation, but nothing rules the finding out either.
    Inconclusive,
    /// Every flow of a non-empty set was infeasible.
    Infeasible,
    /// No confirmation and at least one flow hit the value budget.
    BudgetExhausted,
}
pub fn aggregate_verdict(flows: &[BackwardFlow]) -> FindingVerdict {
if flows.iter().any(|f| f.is_confirmation()) {
return FindingVerdict::Confirmed;
}
if flows.iter().all(|f| f.infeasible) && !flows.is_empty() {
return FindingVerdict::Infeasible;
}
if flows.iter().any(|f| f.budget_exhausted) {
return FindingVerdict::BudgetExhausted;
}
FindingVerdict::Inconclusive
}
/// Records the backwards-analysis verdict on `finding` as a cutoff note,
/// creating an empty `SymbolicVerdict` when none exists yet. Inconclusive
/// verdicts leave the finding untouched; duplicate notes are not appended.
pub fn annotate_finding(finding: &mut Finding, verdict: FindingVerdict) {
    let note = match verdict {
        FindingVerdict::Inconclusive => return,
        FindingVerdict::Confirmed => NOTE_CONFIRMED,
        FindingVerdict::Infeasible => NOTE_INFEASIBLE,
        FindingVerdict::BudgetExhausted => NOTE_BUDGET,
    };
    let sv = finding
        .symbolic
        .get_or_insert(crate::evidence::SymbolicVerdict {
            verdict: crate::evidence::Verdict::NotAttempted,
            constraints_checked: 0,
            paths_explored: 0,
            witness: None,
            interproc_call_chains: Vec::new(),
            cutoff_notes: Vec::new(),
        });
    if sv.cutoff_notes.iter().all(|n| n != note) {
        sv.cutoff_notes.push(note.to_string());
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests built on tiny hand-constructed SSA bodies and CFGs,
    //! exercising the backwards walker, verdict aggregation, and finding
    //! annotation.
    use super::*;
    use crate::cfg::{EdgeKind, NodeInfo};
    use crate::ssa::ir::{BlockId, SsaBlock, SsaInst, Terminator, ValueDef};
    use petgraph::Graph;
    use petgraph::graph::NodeIndex;
    use smallvec::smallvec;

    // Helper: a ValueDef with no variable name.
    fn make_value_def(block: BlockId, cfg_node: NodeIndex) -> ValueDef {
        ValueDef {
            var_name: None,
            cfg_node,
            block,
        }
    }

    // Helper: a single block computing `v0 = source; v1 = v0; return v1`,
    // with one CFG node per instruction.
    fn build_trivial_source_body() -> (SsaBody, Cfg) {
        let mut cfg: Graph<NodeInfo, EdgeKind> = Graph::new();
        let src_node = cfg.add_node(NodeInfo::default());
        let use_node = cfg.add_node(NodeInfo::default());
        let source_val = SsaValue(0);
        let user_val = SsaValue(1);
        let block = SsaBlock {
            id: BlockId(0),
            phis: vec![],
            body: vec![
                SsaInst {
                    value: source_val,
                    op: SsaOp::Source,
                    cfg_node: src_node,
                    var_name: None,
                    span: (0, 0),
                },
                SsaInst {
                    value: user_val,
                    op: SsaOp::Assign(smallvec![source_val]),
                    cfg_node: use_node,
                    var_name: None,
                    span: (0, 0),
                },
            ],
            terminator: Terminator::Return(Some(user_val)),
            preds: SmallVec::new(),
            succs: SmallVec::new(),
        };
        let ssa = SsaBody {
            blocks: vec![block],
            entry: BlockId(0),
            value_defs: vec![
                make_value_def(BlockId(0), src_node),
                make_value_def(BlockId(0), use_node),
            ],
            cfg_node_map: std::collections::HashMap::new(),
            exception_edges: Vec::new(),
            field_interner: crate::ssa::ir::FieldInterner::default(),
            field_writes: std::collections::HashMap::new(),
            synthetic_externals: std::collections::HashSet::new(),
        };
        (ssa, cfg)
    }

    // DemandState::new zeroes counters/depth and stores the caps.
    #[test]
    fn demand_state_new_sets_caps() {
        let d = DemandState::new(Cap::SQL_QUERY);
        assert_eq!(d.caps, Cap::SQL_QUERY);
        assert_eq!(d.depth, 0);
        assert_eq!(d.validated_true, 0);
        assert_eq!(d.validated_false, 0);
    }

    // Caps survive construction, including combined bitflags.
    #[test]
    fn demand_state_roundtrips_data_exfil_cap() {
        let d = DemandState::new(Cap::DATA_EXFIL);
        assert_eq!(d.caps, Cap::DATA_EXFIL);
        assert!(d.caps.contains(Cap::DATA_EXFIL));
        let combined = DemandState::new(Cap::DATA_EXFIL | Cap::SSRF);
        assert!(combined.caps.contains(Cap::DATA_EXFIL));
        assert!(combined.caps.contains(Cap::SSRF));
    }

    // A source labelled DATA_EXFIL on the CFG confirms a DATA_EXFIL sink.
    #[test]
    fn driver_walks_data_exfil_source_to_sink() {
        let (ssa, mut cfg) = build_trivial_source_body();
        let src_node = NodeIndex::new(0);
        cfg[src_node]
            .taint
            .labels
            .push(DataLabel::Source(Cap::DATA_EXFIL));
        let ctx = BackwardsCtx::new(&ssa, &cfg, Lang::JavaScript);
        let flows = analyse_sink_backwards(&ctx, SsaValue(1), NodeIndex::new(1), Cap::DATA_EXFIL);
        assert_eq!(flows.len(), 1, "exactly one DATA_EXFIL flow expected");
        assert!(flows[0].is_confirmation(), "must confirm at the source");
        assert_eq!(flows[0].sink_caps, Cap::DATA_EXFIL);
    }

    // Source ops terminate the transfer with no fan-out.
    #[test]
    fn backward_transfer_source_terminates() {
        let (ssa, _cfg) = build_trivial_source_body();
        let demand = DemandState::new(Cap::all());
        let (step, next) = backward_transfer(&ssa, SsaValue(0), &demand);
        assert_eq!(next.len(), 0);
        matches!(step, BackwardStep::ReachedSource(_));
    }

    // Const ops terminate the transfer with ReachedConst.
    #[test]
    fn backward_transfer_const_terminates() {
        let mut cfg: Graph<NodeInfo, EdgeKind> = Graph::new();
        let c_node = cfg.add_node(NodeInfo::default());
        let ssa = SsaBody {
            blocks: vec![SsaBlock {
                id: BlockId(0),
                phis: vec![],
                body: vec![SsaInst {
                    value: SsaValue(0),
                    op: SsaOp::Const(None),
                    cfg_node: c_node,
                    var_name: None,
                    span: (0, 0),
                }],
                terminator: Terminator::Return(Some(SsaValue(0))),
                preds: SmallVec::new(),
                succs: SmallVec::new(),
            }],
            entry: BlockId(0),
            value_defs: vec![make_value_def(BlockId(0), c_node)],
            cfg_node_map: std::collections::HashMap::new(),
            exception_edges: Vec::new(),
            field_interner: crate::ssa::ir::FieldInterner::default(),
            field_writes: std::collections::HashMap::new(),
            synthetic_externals: std::collections::HashSet::new(),
        };
        let demand = DemandState::new(Cap::all());
        let (step, next) = backward_transfer(&ssa, SsaValue(0), &demand);
        assert!(next.is_empty());
        assert_eq!(step, BackwardStep::ReachedConst);
    }

    // Param ops terminate and carry the parameter index through.
    #[test]
    fn backward_transfer_param_terminates() {
        let mut cfg: Graph<NodeInfo, EdgeKind> = Graph::new();
        let p_node = cfg.add_node(NodeInfo::default());
        let ssa = SsaBody {
            blocks: vec![SsaBlock {
                id: BlockId(0),
                phis: vec![],
                body: vec![SsaInst {
                    value: SsaValue(0),
                    op: SsaOp::Param { index: 2 },
                    cfg_node: p_node,
                    var_name: None,
                    span: (0, 0),
                }],
                terminator: Terminator::Return(Some(SsaValue(0))),
                preds: SmallVec::new(),
                succs: SmallVec::new(),
            }],
            entry: BlockId(0),
            value_defs: vec![make_value_def(BlockId(0), p_node)],
            cfg_node_map: std::collections::HashMap::new(),
            exception_edges: Vec::new(),
            field_interner: crate::ssa::ir::FieldInterner::default(),
            field_writes: std::collections::HashMap::new(),
            synthetic_externals: std::collections::HashSet::new(),
        };
        let demand = DemandState::new(Cap::all());
        let (step, _next) = backward_transfer(&ssa, SsaValue(0), &demand);
        match step {
            BackwardStep::ReachedParam { index, .. } => assert_eq!(index, 2),
            other => panic!("expected ReachedParam, got {:?}", other),
        }
    }

    // Assign fans out to its single operand with the demand forwarded.
    #[test]
    fn backward_transfer_assign_fans_out() {
        let (ssa, _cfg) = build_trivial_source_body();
        let demand = DemandState::new(Cap::all());
        let (step, next) = backward_transfer(&ssa, SsaValue(1), &demand);
        assert_eq!(step, BackwardStep::Assign);
        assert_eq!(next.len(), 1);
        assert_eq!(next[0].0, SsaValue(0));
        assert_eq!(next[0].1.caps, Cap::all());
    }

    // A two-predecessor phi fans out to both operands.
    #[test]
    fn backward_transfer_phi_fans_out() {
        let mut cfg: Graph<NodeInfo, EdgeKind> = Graph::new();
        let n0 = cfg.add_node(NodeInfo::default());
        let n1 = cfg.add_node(NodeInfo::default());
        let n2 = cfg.add_node(NodeInfo::default());
        let n3 = cfg.add_node(NodeInfo::default());
        let ssa = SsaBody {
            blocks: vec![
                SsaBlock {
                    id: BlockId(0),
                    phis: vec![],
                    body: vec![SsaInst {
                        value: SsaValue(0),
                        op: SsaOp::Source,
                        cfg_node: n0,
                        var_name: None,
                        span: (0, 0),
                    }],
                    terminator: Terminator::Goto(BlockId(2)),
                    preds: SmallVec::new(),
                    succs: smallvec![BlockId(2)],
                },
                SsaBlock {
                    id: BlockId(1),
                    phis: vec![],
                    body: vec![SsaInst {
                        value: SsaValue(1),
                        op: SsaOp::Const(None),
                        cfg_node: n1,
                        var_name: None,
                        span: (0, 0),
                    }],
                    terminator: Terminator::Goto(BlockId(2)),
                    preds: SmallVec::new(),
                    succs: smallvec![BlockId(2)],
                },
                SsaBlock {
                    id: BlockId(2),
                    phis: vec![SsaInst {
                        value: SsaValue(2),
                        op: SsaOp::Phi(smallvec![
                            (BlockId(0), SsaValue(0)),
                            (BlockId(1), SsaValue(1))
                        ]),
                        cfg_node: n2,
                        var_name: None,
                        span: (0, 0),
                    }],
                    body: vec![],
                    terminator: Terminator::Return(Some(SsaValue(2))),
                    preds: smallvec![BlockId(0), BlockId(1)],
                    succs: SmallVec::new(),
                },
            ],
            entry: BlockId(0),
            value_defs: vec![
                make_value_def(BlockId(0), n0),
                make_value_def(BlockId(1), n1),
                make_value_def(BlockId(2), n2),
                make_value_def(BlockId(2), n3),
            ],
            cfg_node_map: std::collections::HashMap::new(),
            exception_edges: Vec::new(),
            field_interner: crate::ssa::ir::FieldInterner::default(),
            field_writes: std::collections::HashMap::new(),
            synthetic_externals: std::collections::HashSet::new(),
        };
        let demand = DemandState::new(Cap::all());
        let (step, next) = backward_transfer(&ssa, SsaValue(2), &demand);
        assert_eq!(step, BackwardStep::Phi);
        assert_eq!(next.len(), 2);
    }

    // End-to-end: the driver walks from sink to the SsaOp::Source op.
    #[test]
    fn driver_walks_source_to_sink() {
        let (ssa, cfg) = build_trivial_source_body();
        let ctx = BackwardsCtx::new(&ssa, &cfg, Lang::Python);
        let flows = analyse_sink_backwards(&ctx, SsaValue(1), NodeIndex::new(1), Cap::all());
        assert_eq!(flows.len(), 1, "one source-reaching flow expected");
        assert!(flows[0].is_confirmation(), "flow should confirm");
        assert_eq!(
            flows[0].sink_node.index(),
            1,
            "sink_node passthrough preserved"
        );
    }

    // A phi with one source arm and one const arm yields exactly one
    // confirming flow (the const arm dies silently).
    #[test]
    fn driver_phi_yields_two_flows() {
        let mut cfg: Graph<NodeInfo, EdgeKind> = Graph::new();
        let n0 = cfg.add_node(NodeInfo::default());
        let n1 = cfg.add_node(NodeInfo::default());
        let n2 = cfg.add_node(NodeInfo::default());
        let ssa = SsaBody {
            blocks: vec![
                SsaBlock {
                    id: BlockId(0),
                    phis: vec![],
                    body: vec![SsaInst {
                        value: SsaValue(0),
                        op: SsaOp::Source,
                        cfg_node: n0,
                        var_name: None,
                        span: (0, 0),
                    }],
                    terminator: Terminator::Goto(BlockId(2)),
                    preds: SmallVec::new(),
                    succs: smallvec![BlockId(2)],
                },
                SsaBlock {
                    id: BlockId(1),
                    phis: vec![],
                    body: vec![SsaInst {
                        value: SsaValue(1),
                        op: SsaOp::Const(None),
                        cfg_node: n1,
                        var_name: None,
                        span: (0, 0),
                    }],
                    terminator: Terminator::Goto(BlockId(2)),
                    preds: SmallVec::new(),
                    succs: smallvec![BlockId(2)],
                },
                SsaBlock {
                    id: BlockId(2),
                    phis: vec![SsaInst {
                        value: SsaValue(2),
                        op: SsaOp::Phi(smallvec![
                            (BlockId(0), SsaValue(0)),
                            (BlockId(1), SsaValue(1))
                        ]),
                        cfg_node: n2,
                        var_name: None,
                        span: (0, 0),
                    }],
                    body: vec![],
                    terminator: Terminator::Return(Some(SsaValue(2))),
                    preds: smallvec![BlockId(0), BlockId(1)],
                    succs: SmallVec::new(),
                },
            ],
            entry: BlockId(0),
            value_defs: vec![
                make_value_def(BlockId(0), n0),
                make_value_def(BlockId(1), n1),
                make_value_def(BlockId(2), n2),
            ],
            cfg_node_map: std::collections::HashMap::new(),
            exception_edges: Vec::new(),
            field_interner: crate::ssa::ir::FieldInterner::default(),
            field_writes: std::collections::HashMap::new(),
            synthetic_externals: std::collections::HashSet::new(),
        };
        let ctx = BackwardsCtx::new(&ssa, &cfg, Lang::JavaScript);
        let flows = analyse_sink_backwards(&ctx, SsaValue(2), NodeIndex::new(2), Cap::all());
        assert_eq!(
            flows.iter().filter(|f| f.is_confirmation()).count(),
            1,
            "exactly one source-reaching flow through the phi"
        );
    }

    // Confirmation outranks infeasibility; all-infeasible sets are
    // Infeasible; an empty set is Inconclusive.
    #[test]
    fn aggregate_verdict_prefers_confirmation() {
        let confirmed = BackwardFlow {
            sink_value: SsaValue(0),
            sink_node: NodeIndex::new(0),
            sink_caps: Cap::SQL_QUERY,
            source_kind: Some(SourceKind::UserInput),
            source_node: Some(NodeIndex::new(1)),
            infeasible: false,
            budget_exhausted: false,
            max_depth: 0,
            chain: SmallVec::new(),
        };
        let infeasible = BackwardFlow {
            sink_value: SsaValue(0),
            sink_node: NodeIndex::new(0),
            sink_caps: Cap::SQL_QUERY,
            source_kind: None,
            source_node: None,
            infeasible: true,
            budget_exhausted: false,
            max_depth: 0,
            chain: SmallVec::new(),
        };
        assert_eq!(
            aggregate_verdict(&[confirmed.clone(), infeasible.clone()]),
            FindingVerdict::Confirmed
        );
        assert_eq!(aggregate_verdict(&[infeasible]), FindingVerdict::Infeasible);
        assert_eq!(aggregate_verdict(&[]), FindingVerdict::Inconclusive);
    }

    // Annotation creates the SymbolicVerdict lazily and never duplicates
    // the same note.
    #[test]
    fn annotate_finding_appends_note() {
        use crate::evidence::FlowStepKind;
        use crate::taint::FlowStepRaw;
        let mut f = Finding {
            body_id: crate::cfg::BodyId(0),
            sink: NodeIndex::new(0),
            source: NodeIndex::new(1),
            path: vec![],
            source_kind: SourceKind::UserInput,
            path_validated: false,
            guard_kind: None,
            hop_count: 0,
            cap_specificity: 1,
            uses_summary: false,
            flow_steps: vec![FlowStepRaw {
                cfg_node: NodeIndex::new(1),
                var_name: None,
                op_kind: FlowStepKind::Source,
            }],
            symbolic: None,
            source_span: None,
            primary_location: None,
            engine_notes: smallvec::SmallVec::new(),
            path_hash: 0,
            finding_id: String::new(),
            alternative_finding_ids: smallvec::SmallVec::new(),
            effective_sink_caps: crate::labels::Cap::empty(),
        };
        annotate_finding(&mut f, FindingVerdict::Confirmed);
        let sv = f.symbolic.as_ref().expect("symbolic verdict created");
        assert!(sv.cutoff_notes.iter().any(|n| n == NOTE_CONFIRMED));
        annotate_finding(&mut f, FindingVerdict::Confirmed);
        let sv = f.symbolic.as_ref().unwrap();
        assert_eq!(
            sv.cutoff_notes
                .iter()
                .filter(|n| *n == NOTE_CONFIRMED)
                .count(),
            1
        );
    }

    // Inconclusive verdicts must not create a SymbolicVerdict.
    #[test]
    fn annotate_finding_inconclusive_no_change() {
        let mut f = Finding {
            body_id: crate::cfg::BodyId(0),
            sink: NodeIndex::new(0),
            source: NodeIndex::new(1),
            path: vec![],
            source_kind: SourceKind::Unknown,
            path_validated: false,
            guard_kind: None,
            hop_count: 0,
            cap_specificity: 0,
            uses_summary: false,
            flow_steps: vec![],
            symbolic: None,
            source_span: None,
            primary_location: None,
            engine_notes: smallvec::SmallVec::new(),
            path_hash: 0,
            finding_id: String::new(),
            alternative_finding_ids: smallvec::SmallVec::new(),
            effective_sink_caps: crate::labels::Cap::empty(),
        };
        annotate_finding(&mut f, FindingVerdict::Inconclusive);
        assert!(f.symbolic.is_none());
    }

    // budget_exhausted overrides even a present source_kind.
    #[test]
    fn budget_exhausted_flow_not_confirmation() {
        let bf = BackwardFlow {
            sink_value: SsaValue(0),
            sink_node: NodeIndex::new(0),
            sink_caps: Cap::all(),
            source_kind: Some(SourceKind::UserInput),
            source_node: Some(NodeIndex::new(1)),
            infeasible: false,
            budget_exhausted: true,
            max_depth: 0,
            chain: SmallVec::new(),
        };
        assert!(!bf.is_confirmation());
    }
}