use super::*;
use std::io::Write;
/// Append `name` as a NUL-terminated entry to the BTF string table `s`
/// and return the byte offset at which the name starts.
fn push_name(s: &mut Vec<u8>, name: &str) -> u32 {
    let start = s.len() as u32;
    for &b in name.as_bytes() {
        s.push(b);
    }
    s.push(0u8);
    start
}
/// BTF header magic for little-endian encoding.
const BTF_MAGIC: u16 = 0xEB9F;
const BTF_VERSION: u8 = 1;
/// Size in bytes of the fixed header emitted by `build_btf`.
const BTF_HEADER_LEN: u32 = 24;
// BTF type kinds, stored in bits 24..29 of a type's `info` word.
const BTF_KIND_INT: u32 = 1;
const BTF_KIND_PTR: u32 = 2;
const BTF_KIND_ARRAY: u32 = 3;
const BTF_KIND_STRUCT: u32 = 4;
const BTF_KIND_UNION: u32 = 5;
const BTF_KIND_FWD: u32 = 7;
const BTF_KIND_TYPEDEF: u32 = 8;
const BTF_KIND_VOLATILE: u32 = 9;
const BTF_KIND_CONST: u32 = 10;
const BTF_KIND_FUNC: u32 = 12;
const BTF_KIND_FUNC_PROTO: u32 = 13;
const BTF_KIND_VAR: u32 = 14;
const BTF_KIND_DATASEC: u32 = 15;
/// Bit 31 of the `info` word: the kind-specific flag (used here to mark
/// bitfield-carrying structs in `StructBitfields`).
const KIND_FLAG_BIT: u32 = 1 << 31;
/// A struct/union member: name offset into the string table, the member's
/// 1-based BTF type id, and its byte offset inside the aggregate.
#[derive(Clone, Copy)]
struct SynMember {
    name_off: u32,
    type_id: u32,
    byte_offset: u32,
}
/// One parameter of a `SynType::FuncProto`.
#[derive(Clone, Copy)]
struct SynParam {
    name_off: u32,
    type_id: u32,
}
/// A bitfield member: offset and size are expressed in bits; used only by
/// `SynType::StructBitfields`, which sets `KIND_FLAG_BIT`.
#[derive(Clone, Copy)]
struct SynMemberBits {
    name_off: u32,
    type_id: u32,
    bit_offset: u32,
    bitfield_size_bits: u32,
}
/// Declarative description of one BTF type. `build_btf` serializes these
/// in order, so the 1-based BTF type id of `types[i]` is `i + 1`.
#[allow(dead_code)]
enum SynType {
    Int {
        name_off: u32,
        size: u32,
        encoding: u32,
        // Bit offset / bit width packed into the INT type's trailing u32.
        offset: u32,
        bits: u32,
    },
    Ptr {
        type_id: u32,
    },
    Array {
        type_id: u32,
        index_type_id: u32,
        nelems: u32,
    },
    Struct {
        name_off: u32,
        size: u32,
        members: Vec<SynMember>,
    },
    Union {
        name_off: u32,
        size: u32,
        members: Vec<SynMember>,
    },
    // A struct whose members are bitfields (kind flag set; member offsets
    // carry the bitfield size in the top byte).
    StructBitfields {
        name_off: u32,
        size: u32,
        members: Vec<SynMemberBits>,
    },
    Fwd {
        name_off: u32,
        // 0 = struct forward declaration, 1 = union.
        kind_flag: u32,
    },
    Typedef {
        name_off: u32,
        type_id: u32,
    },
    Volatile {
        type_id: u32,
    },
    Const {
        type_id: u32,
    },
    Func {
        name_off: u32,
        // Id of the FUNC_PROTO this function implements.
        type_id: u32,
        linkage: u32,
    },
    FuncProto {
        return_type_id: u32,
        params: Vec<SynParam>,
    },
    Var {
        name_off: u32,
        type_id: u32,
        linkage: u32,
    },
    Datasec {
        name_off: u32,
        size: u32,
        entries: Vec<SynVarSecinfo>,
    },
}
/// One DATASEC entry: the VAR's type id plus its offset and size within
/// the section.
#[derive(Clone, Copy)]
struct SynVarSecinfo {
    type_id: u32,
    offset: u32,
    size: u32,
}
/// Serialize `types` (in order, so `types[i]` becomes 1-based id `i + 1`)
/// and the string table `strings` into a complete little-endian BTF blob:
/// a 24-byte header, then the type section, then the string section.
///
/// Each type record is: name_off (u32), info (u32: kind in bits 24..29,
/// vlen in bits 0..16, kind flag in bit 31), then a kind-specific payload.
fn build_btf(types: &[SynType], strings: &[u8]) -> Vec<u8> {
    let mut type_section = Vec::new();
    for ty in types {
        match ty {
            SynType::Int {
                name_off,
                size,
                encoding,
                offset,
                bits,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (BTF_KIND_INT << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                // INT trailing word: encoding in bits 24.., offset in 16..24,
                // bit width in the low byte.
                let int_data = (*encoding << 24) | ((*offset & 0xff) << 16) | (*bits & 0xff);
                type_section.extend_from_slice(&int_data.to_le_bytes());
            }
            SynType::Ptr { type_id } => {
                let name_off: u32 = 0;
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (BTF_KIND_PTR << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            SynType::Array {
                type_id,
                index_type_id,
                nelems,
            } => {
                let name_off: u32 = 0;
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (BTF_KIND_ARRAY << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                // ARRAY uses size_type = 0, then elem type, index type, count.
                let size_type: u32 = 0;
                type_section.extend_from_slice(&size_type.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
                type_section.extend_from_slice(&index_type_id.to_le_bytes());
                type_section.extend_from_slice(&nelems.to_le_bytes());
            }
            SynType::Struct {
                name_off,
                size,
                members,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let vlen = members.len() as u32;
                let info = ((BTF_KIND_STRUCT << 24) & 0x1f00_0000) | (vlen & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                for m in members {
                    type_section.extend_from_slice(&m.name_off.to_le_bytes());
                    type_section.extend_from_slice(&m.type_id.to_le_bytes());
                    // Non-bitfield member offsets are stored in bits.
                    let bit_off = m.byte_offset * 8;
                    type_section.extend_from_slice(&bit_off.to_le_bytes());
                }
            }
            SynType::Union {
                name_off,
                size,
                members,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let vlen = members.len() as u32;
                let info = ((BTF_KIND_UNION << 24) & 0x1f00_0000) | (vlen & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                for m in members {
                    type_section.extend_from_slice(&m.name_off.to_le_bytes());
                    type_section.extend_from_slice(&m.type_id.to_le_bytes());
                    let bit_off = m.byte_offset * 8;
                    type_section.extend_from_slice(&bit_off.to_le_bytes());
                }
            }
            SynType::StructBitfields {
                name_off,
                size,
                members,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let vlen = members.len() as u32;
                // kind flag set: member offsets carry the bitfield size in
                // the top byte and the bit offset in the low 24 bits.
                let info =
                    (((BTF_KIND_STRUCT << 24) & 0x1f00_0000) | (vlen & 0xffff)) | KIND_FLAG_BIT;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                for m in members {
                    type_section.extend_from_slice(&m.name_off.to_le_bytes());
                    type_section.extend_from_slice(&m.type_id.to_le_bytes());
                    let packed =
                        ((m.bitfield_size_bits & 0xff) << 24) | (m.bit_offset & 0x00ff_ffff);
                    type_section.extend_from_slice(&packed.to_le_bytes());
                }
            }
            SynType::Fwd {
                name_off,
                kind_flag,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = ((BTF_KIND_FWD << 24) & 0x1f00_0000) | ((*kind_flag & 0x1) << 31);
                type_section.extend_from_slice(&info.to_le_bytes());
                // FWD has no payload; the size/type slot is zero.
                type_section.extend_from_slice(&0u32.to_le_bytes());
            }
            SynType::Typedef { name_off, type_id } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (BTF_KIND_TYPEDEF << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            SynType::Volatile { type_id } => {
                let name_off: u32 = 0;
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (BTF_KIND_VOLATILE << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            SynType::Const { type_id } => {
                let name_off: u32 = 0;
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (BTF_KIND_CONST << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            SynType::Func {
                name_off,
                type_id,
                linkage,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                // FUNC stores its linkage in the vlen field.
                let info = ((BTF_KIND_FUNC << 24) & 0x1f00_0000) | (*linkage & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            SynType::FuncProto {
                return_type_id,
                params,
            } => {
                let name_off: u32 = 0;
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let vlen = params.len() as u32;
                let info = ((BTF_KIND_FUNC_PROTO << 24) & 0x1f00_0000) | (vlen & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&return_type_id.to_le_bytes());
                for p in params {
                    type_section.extend_from_slice(&p.name_off.to_le_bytes());
                    type_section.extend_from_slice(&p.type_id.to_le_bytes());
                }
            }
            SynType::Var {
                name_off,
                type_id,
                linkage,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (BTF_KIND_VAR << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
                // VAR has an extra trailing u32: its linkage.
                type_section.extend_from_slice(&linkage.to_le_bytes());
            }
            SynType::Datasec {
                name_off,
                size,
                entries,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let vlen = entries.len() as u32;
                let info = ((BTF_KIND_DATASEC << 24) & 0x1f00_0000) | (vlen & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                for e in entries {
                    type_section.extend_from_slice(&e.type_id.to_le_bytes());
                    type_section.extend_from_slice(&e.offset.to_le_bytes());
                    type_section.extend_from_slice(&e.size.to_le_bytes());
                }
            }
        }
    }
    let type_len = type_section.len() as u32;
    let str_len = strings.len() as u32;
    let mut blob = Vec::new();
    // Header: magic, version, flags, hdr_len, then the four section words.
    blob.write_all(&BTF_MAGIC.to_le_bytes()).unwrap();
    blob.push(BTF_VERSION);
    blob.push(0); // flags
    blob.write_all(&BTF_HEADER_LEN.to_le_bytes()).unwrap();
    // type_off = 0 (sections are offset from the end of the header);
    // the string section immediately follows the type section.
    blob.write_all(&0u32.to_le_bytes()).unwrap();
    blob.write_all(&type_len.to_le_bytes()).unwrap();
    blob.write_all(&type_len.to_le_bytes()).unwrap();
    blob.write_all(&str_len.to_le_bytes()).unwrap();
    blob.extend_from_slice(&type_section);
    blob.extend_from_slice(strings);
    blob
}
/// Build a minimal BTF blob with three types: `u64`, a source struct `T`
/// whose sole u64 field `f` sits at `field_off`, and a candidate target
/// struct `Q` whose sole u64 field `x` sits at `target_off`.
/// Returns `(blob, id of T, id of Q)`.
fn btf_with_source_and_target(field_off: u32, target_off: u32) -> (Vec<u8>, u32, u32) {
    let mut strings: Vec<u8> = vec![0];
    let name_u64 = push_name(&mut strings, "u64");
    let name_t = push_name(&mut strings, "T");
    let name_q = push_name(&mut strings, "Q");
    let name_f = push_name(&mut strings, "f");
    let name_x = push_name(&mut strings, "x");
    let u64_ty = SynType::Int {
        name_off: name_u64,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    };
    let source_t = SynType::Struct {
        name_off: name_t,
        size: field_off + 8,
        members: vec![SynMember {
            name_off: name_f,
            type_id: 1,
            byte_offset: field_off,
        }],
    };
    let target_q = SynType::Struct {
        name_off: name_q,
        size: target_off + 8,
        members: vec![SynMember {
            name_off: name_x,
            type_id: 1,
            byte_offset: target_off,
        }],
    };
    let blob = build_btf(&[u64_ty, source_t, target_q], &strings);
    // Ids are 1-based and follow declaration order: u64 = 1, T = 2, Q = 3.
    (blob, 2, 3)
}
/// Thin constructor over `BpfInsn::new` for raw instruction fields.
fn mk_insn(code: u8, dst: u8, src: u8, off: i16, imm: i32) -> BpfInsn {
    BpfInsn::new(code, dst, src, off, imm)
}
/// Memory load: `dst = *(size *)(src + off)`.
fn ldx(size: u8, dst: u8, src: u8, off: i16) -> BpfInsn {
    mk_insn(BPF_CLASS_LDX | size | BPF_MODE_MEM, dst, src, off, 0)
}
/// Memory store: `*(size *)(dst + off) = src`.
fn stx(size: u8, dst: u8, src: u8, off: i16) -> BpfInsn {
    mk_insn(BPF_CLASS_STX | size | BPF_MODE_MEM, dst, src, off, 0)
}
/// 64-bit register move: `dst = src`.
fn mov_x(dst: u8, src: u8) -> BpfInsn {
    mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, dst, src, 0, 0)
}
/// 64-bit immediate move: `dst = imm`.
fn mov_k(dst: u8, imm: i32) -> BpfInsn {
    mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV, dst, 0, 0, imm)
}
/// Helper call (imm = helper id 1; tests only use it to clobber state).
fn call() -> BpfInsn {
    mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, 0, 0, 1)
}
/// Kfunc call: src = BPF_PSEUDO_KFUNC_CALL, imm = BTF id of the kfunc.
fn kfunc_call(kfunc_btf_id: u32) -> BpfInsn {
    mk_insn(
        BPF_CLASS_JMP | BPF_OP_CALL,
        0,
        BPF_PSEUDO_KFUNC_CALL,
        0,
        kfunc_btf_id as i32,
    )
}
fn exit() -> BpfInsn {
    mk_insn(BPF_CLASS_JMP | BPF_OP_EXIT, 0, 0, 0, 0)
}
/// addr_space_cast encoding: BPF_MOV|X with off = 1. `imm` selects the
/// address-space pair (callers pass 1 to cast into the arena, 0x10000 for
/// the reverse direction) — presumably mirrors the kernel encoding; confirm
/// against the analyzer's decoder.
fn addr_space_cast(dst: u8, src: u8, imm: i32) -> BpfInsn {
    mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, dst, src, 1, imm)
}
// No instructions: nothing to analyze, result map must be empty.
#[test]
fn empty_insns_yields_empty_map() {
    let (blob, _t, _q) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let map = analyze_casts(&[], &btf, &[], &[], &[], &[]);
    assert!(map.is_empty());
}
// Without an InitialReg seed no register has a known struct type, so the
// load/deref pattern cannot be attributed to any source field.
#[test]
fn no_initial_seed_yields_empty_map() {
    let (blob, _t, _q) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let map = analyze_casts(&insns, &btf, &[], &[], &[], &[]);
    assert!(map.is_empty());
}
// Load T.f (u64 at offset 8), addr_space_cast it, then dereference: the
// analysis must record (T, 8) -> Q in the arena address space.
#[test]
fn simple_cast_recovers_target() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(4, 2, 1),
        ldx(BPF_SIZE_DW, 3, 4, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "got: {map:?}"
    );
}
// Pure shape inference (deref pattern matching a candidate struct) must NOT
// emit unless there is direct arena evidence; the second half of the test
// proves the same shape DOES emit once an addr_space_cast is present.
#[test]
fn shape_inference_alone_drops_without_arena_confirmed() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns_no_evidence = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let map_no_evidence = analyze_casts(
        &insns_no_evidence,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map_no_evidence.is_empty(),
        "shape inference without `arena_confirmed` / `arena_stx_findings` \
         must drop per the F1 mitigation: {map_no_evidence:?}"
    );
    let insns_with_evidence = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(4, 2, 1),
        ldx(BPF_SIZE_DW, 3, 4, 0),
        exit(),
    ];
    let map_with_evidence = analyze_casts(
        &insns_with_evidence,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map_with_evidence.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "with addr_space_cast evidence the same shape MUST emit, \
         proving (a)'s empty result is the F1 gate firing: \
         {map_with_evidence:?}"
    );
}
// Even a multi-offset deref pattern uniquely matching Q must drop when the
// program contains no addr_space_cast and no STX-flow evidence.
#[test]
fn f1_mitigation_rejects_shape_inference_without_evidence() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_u32 = push_name(&mut strings, "u32");
    let n_t = push_name(&mut strings, "T");
    let n_q = push_name(&mut strings, "Q");
    let n_f = push_name(&mut strings, "f");
    let n_a = push_name(&mut strings, "a");
    let n_b = push_name(&mut strings, "b");
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // Q { u64 a @0, u32 b @8 } matches the deref pattern below.
        SynType::Struct {
            name_off: n_q,
            size: 12,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 2,
                    byte_offset: 8,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 3;
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        ldx(BPF_SIZE_W, 4, 2, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "multi-offset shape inference with NO direct arena evidence \
         (no addr_space_cast, no STX-flow tag) must drop per F1 \
         mitigation: {map:?}"
    );
}
// Two structurally identical candidates (Q1, Q2) cannot be told apart by
// the deref pattern, so the analysis must drop rather than guess.
#[test]
fn ambiguous_targets_drop_silently() {
    let mut strings: Vec<u8> = vec![0];
    let n_int = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q1 = push_name(&mut strings, "Q1");
    let n_q2 = push_name(&mut strings, "Q2");
    let n_f = push_name(&mut strings, "f");
    let n_x = push_name(&mut strings, "x");
    let types = vec![
        SynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        SynType::Struct {
            name_off: n_q1,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Struct {
            name_off: n_q2,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: 2,
        }],
        &[],
        &[],
        &[],
    );
    assert!(map.is_empty(), "ambiguous candidates must drop: {map:?}");
}
// A second dereference at offset 8 with DW size matches Q1 (u64 @8) but
// not Q2 (u32 @8), so the extra offset disambiguates to Q1.
#[test]
fn multi_offset_disambiguates_target() {
    let mut strings: Vec<u8> = vec![0];
    let n_u32 = push_name(&mut strings, "u32");
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q1 = push_name(&mut strings, "Q1");
    let n_q2 = push_name(&mut strings, "Q2");
    let n_f = push_name(&mut strings, "f");
    let n_a = push_name(&mut strings, "a");
    let n_b = push_name(&mut strings, "b");
    let types = vec![
        SynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 2,
                byte_offset: 8,
            }],
        },
        SynType::Struct {
            name_off: n_q1,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 2,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 2,
                    byte_offset: 8,
                },
            ],
        },
        SynType::Struct {
            name_off: n_q2,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 2,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 3;
    let q1_id = 4;
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        ldx(BPF_SIZE_DW, 4, 2, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q1_id,
            addr_space: AddrSpace::Arena,
        }),
        "map: {map:?}"
    );
}
// Two independent cast chains in one program: T.f1 resolves to Q1 via a
// DW deref at offset 8, T.f2 to Q2 via DW@0 + W@8; both must be recorded
// under their own (type, offset) keys.
#[test]
fn multiple_distinct_casts_recorded() {
    let mut strings: Vec<u8> = vec![0];
    let n_u32 = push_name(&mut strings, "u32");
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q1 = push_name(&mut strings, "Q1");
    let n_q2 = push_name(&mut strings, "Q2");
    let n_f1 = push_name(&mut strings, "f1");
    let n_f2 = push_name(&mut strings, "f2");
    let n_a = push_name(&mut strings, "a");
    let n_b = push_name(&mut strings, "b");
    let types = vec![
        SynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 24,
            members: vec![
                SynMember {
                    name_off: n_f1,
                    type_id: 2,
                    byte_offset: 8,
                },
                SynMember {
                    name_off: n_f2,
                    type_id: 2,
                    byte_offset: 16,
                },
            ],
        },
        SynType::Struct {
            name_off: n_q1,
            size: 16,
            members: vec![SynMember {
                name_off: n_a,
                type_id: 2,
                byte_offset: 8,
            }],
        },
        SynType::Struct {
            name_off: n_q2,
            size: 12,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 2,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 3;
    let q1_id = 4;
    let q2_id = 5;
    let insns = vec![
        // Chain 1: T.f1 -> Q1.
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 8),
        // Reset r2 before reusing it for the second chain.
        mov_k(2, 0),
        // Chain 2: T.f2 -> Q2.
        ldx(BPF_SIZE_DW, 2, 1, 16),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 4, 2, 0),
        ldx(BPF_SIZE_W, 5, 2, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q1_id,
            addr_space: AddrSpace::Arena,
        }),
        "f1: {map:?}"
    );
    assert_eq!(
        map.get(&(t_id, 16)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q2_id,
            addr_space: AddrSpace::Arena,
        }),
        "f2: {map:?}"
    );
}
// A helper call clobbers caller-saved registers; r2's loaded-from-T.f
// state must not survive the call.
#[test]
fn register_reuse_after_call_clears_state() {
    let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        call(),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "post-call r2 must not retain T.f source: {map:?}"
    );
}
// Only a full 64-bit (DW) load of a u64 field can carry a pointer value;
// a 32-bit load must not seed the tracking.
#[test]
fn nondw_load_does_not_track_u64_field() {
    let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![ldx(BPF_SIZE_W, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(map.is_empty(), "32-bit load must not seed cast: {map:?}");
}
// When T.f is already a typed pointer (Q*) in BTF, dereferencing it is
// ordinary typed access, not a disguised cast, so no hit is recorded.
#[test]
fn ptr_field_tracked_as_typed_pointer_not_cast() {
    let mut strings: Vec<u8> = vec![0];
    let n_int = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q = push_name(&mut strings, "Q");
    let n_f = push_name(&mut strings, "f");
    let n_x = push_name(&mut strings, "x");
    let types = vec![
        SynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // id 3: Q* — T.f is declared with this pointer type.
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 3,
                byte_offset: 8,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 4;
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "typed Ptr field must not be recorded as cast: {map:?}"
    );
}
// A conditional jump (e.g. a NULL check) must preserve the tracked state
// along the fall-through path for every supported comparison opcode.
#[test]
fn null_check_fall_through_preserves_state() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let variants: &[(u8, &str)] = &[
        (BPF_CLASS_JMP | 0x10, "JEQ_K"),
        (BPF_CLASS_JMP | 0x10 | BPF_SRC_X, "JEQ_X"),
        (BPF_CLASS_JMP | 0x20, "JGT_K"),
        (BPF_CLASS_JMP | 0x30, "JGE_K"),
        (BPF_CLASS_JMP | 0x40, "JSET_K"),
        (BPF_CLASS_JMP | 0x50, "JNE_K"),
        (BPF_CLASS_JMP | 0x60, "JSGT_K"),
        (BPF_CLASS_JMP | 0x70, "JSGE_K"),
        (BPF_CLASS_JMP | 0xa0, "JLT_K"),
        (BPF_CLASS_JMP | 0xb0, "JLE_K"),
        (BPF_CLASS_JMP | 0xc0, "JSLT_K"),
        (BPF_CLASS_JMP | 0xd0, "JSLE_K"),
        (BPF_CLASS_JMP32 | 0x10, "JEQ32_K"),
    ];
    for (code, label) in variants {
        // Jump over nothing (off = 1): both paths reach the deref.
        let jcc = mk_insn(*code, 2, 0, 1, 0);
        let insns = vec![
            ldx(BPF_SIZE_DW, 2, 1, 8),
            addr_space_cast(2, 2, 1),
            jcc,
            ldx(BPF_SIZE_DW, 3, 2, 0),
            exit(),
        ];
        let map = analyze_casts(
            &insns,
            &btf,
            &[InitialReg {
                reg: 1,
                struct_type_id: t_id,
            }],
            &[],
            &[],
            &[],
        );
        assert_eq!(
            map.len(),
            1,
            "{label}: exactly one cast expected on fall-through, got: {map:?}"
        );
        assert_eq!(
            map.get(&(t_id, 8)),
            Some(&CastHit {
                alloc_size: None,
                target_type_id: q_id,
                addr_space: AddrSpace::Arena,
            }),
            "{label}: fall-through deref must record: {map:?}"
        );
    }
}
// The deref sits at the jump TARGET (only reachable via the branch edge),
// so the linearly-tracked state does not apply and the hit must drop.
#[test]
fn deref_at_jump_target_is_dropped() {
    let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let jne = mk_insn(BPF_CLASS_JMP | 0x50, 2, 0, 1, 0);
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        jne,
        exit(),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(map.is_empty(), "deref at branch target must drop: {map:?}");
}
// A register-to-register MOV must carry the tracked cast state to the
// destination register.
#[test]
fn mov_x_propagates_loaded_state() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        mov_x(4, 2),
        ldx(BPF_SIZE_DW, 3, 4, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "MOV must propagate: {map:?}"
    );
}
// LD_IMM64 occupies two instruction slots; the analyzer must skip the
// second (pseudo) slot instead of decoding it as a real instruction.
#[test]
fn ld_imm64_skips_second_slot() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let ld_imm64_lo = mk_insn(BPF_CLASS_LD | BPF_SIZE_DW | BPF_MODE_IMM, 6, 0, 0, 0);
    let ld_imm64_hi = mk_insn(0, 0, 0, 0, 0);
    let insns = vec![
        ld_imm64_lo,
        ld_imm64_hi,
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "LD_IMM64 second slot must skip: {map:?}"
    );
}
// R10 is the frame pointer; seeding it with a struct type is invalid and
// must be ignored.
#[test]
fn r10_seed_rejected() {
    let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 10, 8),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 10,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(map.is_empty(), "r10 seed must be ignored: {map:?}");
}
// The field at the loaded offset is a u32, not a u64: it cannot hold a
// pointer, so the DW load must not seed cast tracking.
#[test]
fn nonu64_field_at_source_offset_not_tracked() {
    let mut strings: Vec<u8> = vec![0];
    let n_u32 = push_name(&mut strings, "u32");
    let n_t = push_name(&mut strings, "T");
    let n_f = push_name(&mut strings, "f");
    let types = vec![
        SynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        SynType::Struct {
            name_off: n_t,
            size: 12,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "u32-typed field must not seed cast: {map:?}"
    );
}
/// BTF fixture for the kptr tests: `u64` (id 1), struct `T { u64 x }`
/// (id 2), `T*` (id 3), and struct `P` (id 4) with a u64 field `slot`
/// at `slot_off`. Returns `(blob, T id, P id, T* id)`.
fn btf_kptr_base(slot_off: u32) -> (Vec<u8>, u32, u32, u32) {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_p = push_name(&mut strings, "P");
    let n_x = push_name(&mut strings, "x");
    let n_slot = push_name(&mut strings, "slot");
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_p,
            size: slot_off + 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: slot_off,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    (blob, 2, 4, 3)
}
// Storing a T* (seeded function param) into P's u64 slot must record a
// kernel-address-space cast at (P, slot_off).
#[test]
fn kptr_from_function_param_stored_to_u64_field() {
    let slot_off: u32 = 16;
    let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![mov_x(6, 1), stx(BPF_SIZE_DW, 2, 6, slot_off as i16), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: t_id,
            },
            InitialReg {
                reg: 2,
                struct_type_id: p_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kptr STX must record kernel-space cast: {map:?}"
    );
}
// Spill T* to the stack, reload it into another register, then store: the
// typed-pointer state must survive the round trip through the stack slot.
#[test]
fn kptr_through_stack_spill() {
    let slot_off: u32 = 24;
    let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![
        stx(BPF_SIZE_DW, 10, 1, -8),
        ldx(BPF_SIZE_DW, 3, 10, -8),
        stx(BPF_SIZE_DW, 4, 3, slot_off as i16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: t_id,
            },
            InitialReg {
                reg: 4,
                struct_type_id: p_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "stack spill must preserve typed pointer: {map:?}"
    );
}
// A kfunc whose FUNC_PROTO returns T* leaves a typed pointer in r0; storing
// r0 into P.slot must record the kernel-space cast.
#[test]
fn kptr_from_kfunc_return() {
    let slot_off: u32 = 16;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_p = push_name(&mut strings, "P");
    let n_x = push_name(&mut strings, "x");
    let n_slot = push_name(&mut strings, "slot");
    let n_kfunc = push_name(&mut strings, "bpf_task_acquire");
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_p,
            size: slot_off + 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: slot_off,
            }],
        },
        // id 5: proto returning T* (id 3); id 6: the kfunc itself.
        SynType::FuncProto {
            return_type_id: 3,
            params: vec![],
        },
        SynType::Func {
            name_off: n_kfunc,
            type_id: 5,
            linkage: 1,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let p_id = 4;
    let kfunc_id = 6;
    let insns = vec![
        kfunc_call(kfunc_id),
        stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kfunc-returned T* stored to P.slot must record: {map:?}"
    );
}
// The call precedes the store, so r1's seeded T* is clobbered before the
// STX — no kptr finding may be recorded.
#[test]
fn kptr_clobbered_by_call() {
    let slot_off: u32 = 16;
    let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![call(), stx(BPF_SIZE_DW, 6, 1, slot_off as i16), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: t_id,
            },
            InitialReg {
                reg: 6,
                struct_type_id: p_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "post-call clobbered R1 must not record kptr: {map:?}"
    );
}
// One program containing both an arena cast (M.arena_ptr -> A) and a kptr
// store (T* into M.kptr); both findings must coexist in the result map.
#[test]
fn mixed_arena_and_kptr_in_one_program() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_a = push_name(&mut strings, "A");
    let n_m = push_name(&mut strings, "M");
    let n_x = push_name(&mut strings, "x");
    let n_a0 = push_name(&mut strings, "a0");
    let n_a1 = push_name(&mut strings, "a1");
    let n_arena_ptr = push_name(&mut strings, "arena_ptr");
    let n_kptr = push_name(&mut strings, "kptr");
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_a,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_a0,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_a1,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
        SynType::Struct {
            name_off: n_m,
            size: 24,
            members: vec![
                SynMember {
                    name_off: n_arena_ptr,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_kptr,
                    type_id: 1,
                    byte_offset: 16,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let a_id = 4;
    let m_id = 5;
    let insns = vec![
        // Arena chain: load M.arena_ptr, cast, deref at 0 and 8 (matches A).
        ldx(BPF_SIZE_DW, 2, 1, 0),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        ldx(BPF_SIZE_DW, 4, 2, 8),
        // Kptr chain: store T* (r6) into M.kptr.
        stx(BPF_SIZE_DW, 1, 6, 16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: m_id,
            },
            InitialReg {
                reg: 6,
                struct_type_id: t_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(m_id, 0)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: a_id,
            addr_space: AddrSpace::Arena,
        }),
        "arena cast missing: {map:?}"
    );
    assert_eq!(
        map.get(&(m_id, 16)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kernel kptr missing: {map:?}"
    );
}
// Seeding from a FuncEntry's FUNC_PROTO must populate only the registers
// that actually have typed params (R1 = T*, R2 = P*); R3..R5 stay Unknown,
// so of the four stores only the R1 -> slot1 one is recorded.
#[test]
fn func_entry_seeding_from_btf() {
    let slot1: u32 = 16;
    let slot3: u32 = 24;
    let slot4: u32 = 32;
    let slot5: u32 = 40;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_p = push_name(&mut strings, "P");
    let n_x = push_name(&mut strings, "x");
    let n_s1 = push_name(&mut strings, "s1");
    let n_s3 = push_name(&mut strings, "s3");
    let n_s4 = push_name(&mut strings, "s4");
    let n_s5 = push_name(&mut strings, "s5");
    let n_arg_t = push_name(&mut strings, "task");
    let n_arg_p = push_name(&mut strings, "parent");
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // id 3: T*.
        SynType::Ptr { type_id: 2 },
        // id 4: P with four u64 slots.
        SynType::Struct {
            name_off: n_p,
            size: slot5 + 8,
            members: vec![
                SynMember {
                    name_off: n_s1,
                    type_id: 1,
                    byte_offset: slot1,
                },
                SynMember {
                    name_off: n_s3,
                    type_id: 1,
                    byte_offset: slot3,
                },
                SynMember {
                    name_off: n_s4,
                    type_id: 1,
                    byte_offset: slot4,
                },
                SynMember {
                    name_off: n_s5,
                    type_id: 1,
                    byte_offset: slot5,
                },
            ],
        },
        // id 5: P*; id 6: proto (T* task, P* parent).
        SynType::Ptr { type_id: 4 },
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![
                SynParam {
                    name_off: n_arg_t,
                    type_id: 3,
                },
                SynParam {
                    name_off: n_arg_p,
                    type_id: 5,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let p_id = 4;
    let proto_id = 6;
    let insns = vec![
        stx(BPF_SIZE_DW, 2, 1, slot1 as i16),
        stx(BPF_SIZE_DW, 2, 3, slot3 as i16),
        stx(BPF_SIZE_DW, 2, 4, slot4 as i16),
        stx(BPF_SIZE_DW, 2, 5, slot5 as i16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[],
        &[FuncEntry {
            insn_offset: 0,
            func_proto_id: proto_id,
        }],
        &[],
        &[],
    );
    assert_eq!(
        map.len(),
        1,
        "FuncEntry must seed only R1 and R2; R3..R5 stay Unknown so \
         only the R1->slot1 STX records: {map:?}"
    );
    assert_eq!(
        map.get(&(p_id, slot1)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "FuncEntry param seeding must populate R1 and R2: {map:?}"
    );
    assert!(
        !map.contains_key(&(p_id, slot3)),
        "R3 must remain Unknown post-FuncEntry: {map:?}"
    );
    assert!(
        !map.contains_key(&(p_id, slot4)),
        "R4 must remain Unknown post-FuncEntry: {map:?}"
    );
    assert!(
        !map.contains_key(&(p_id, slot5)),
        "R5 must remain Unknown post-FuncEntry: {map:?}"
    );
}
// Three-part test: (a) an addr_space_cast with no subsequent deref emits
// nothing; (b) cast plus a conflicting same-slot kptr STX makes both
// observations drop; (c) the STX alone is the baseline that DOES emit,
// proving (b)'s emptiness is a real conflict-drop.
#[test]
fn addr_space_cast_arena_alone_does_not_emit() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let cast = mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 3, 1, 1);
    let insns_cast_only = vec![ldx(BPF_SIZE_DW, 3, 1, 8), cast, exit()];
    let map_cast_only = analyze_casts(
        &insns_cast_only,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map_cast_only.is_empty(),
        "arena_confirmed alone (no deref pattern) must not emit: {map_cast_only:?}"
    );
    let insns_cast_plus_kptr = vec![
        ldx(BPF_SIZE_DW, 3, 1, 8),
        cast,
        stx(BPF_SIZE_DW, 1, 5, 8),
        exit(),
    ];
    let map_cast_plus_kptr = analyze_casts(
        &insns_cast_plus_kptr,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: t_id,
            },
            InitialReg {
                reg: 5,
                struct_type_id: q_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert!(
        map_cast_plus_kptr.is_empty(),
        "cast + same-slot STX must conflict-drop both observations \
         (proves arena_confirmed was populated): {map_cast_plus_kptr:?}"
    );
    let insns_kptr_only = vec![stx(BPF_SIZE_DW, 1, 5, 8), exit()];
    let map_kptr_only = analyze_casts(
        &insns_kptr_only,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: t_id,
            },
            InitialReg {
                reg: 5,
                struct_type_id: q_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map_kptr_only.len(),
        1,
        "STX-only baseline must record exactly one kptr finding: {map_kptr_only:?}"
    );
    assert_eq!(
        map_kptr_only.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Kernel,
        }),
        "STX-only baseline records (T, 8) -> (Q, Kernel): {map_kptr_only:?}"
    );
}
/// A kernel->arena addr_space_cast (imm=0x10000, presumably dst_as<<16|src_as)
/// must invalidate only the destination register: the source keeps its
/// LoadedU64Field state (yielding one Arena finding via r3), while a deref
/// through the cast dst (r4) must contribute nothing.
#[test]
fn addr_space_cast_kernel_to_arena_drops_dst() {
let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
let btf = Btf::from_bytes(&blob).unwrap();
// MOV reg-form with off=1: addr_space_cast r4 <- r3, imm=0x10000.
let cast = mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 3, 1, 0x10000);
// Separate cast that flags r3 as arena-confirmed (helper's exact operand
// meaning is defined elsewhere in this file).
let arena_confirm = addr_space_cast(7, 3, 1);
let insns = vec![
ldx(BPF_SIZE_DW, 3, 1, 8),
arena_confirm,
cast,
// Deref through cast-clobbered dst (r4) — must not record anything.
ldx(BPF_SIZE_DW, 5, 4, 0),
// Deref through preserved src (r3) — the one expected finding.
ldx(BPF_SIZE_DW, 6, 3, 0),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert_eq!(
map.len(),
1,
"exactly one cast (via preserved r3) expected: {map:?}"
);
assert_eq!(
map.get(&(t_id, 8)),
Some(&CastHit {
alloc_size: None,
target_type_id: q_id,
addr_space: AddrSpace::Arena,
}),
"cast preserves src LoadedU64Field; dst-only invalidation: {map:?}"
);
assert!(
!map.keys().any(|k| *k != (t_id, 8)),
"no record may originate from r4 (cast-clobbered dst): {map:?}"
);
}
/// A sign-extending MOV (reg-form MOV with off=8, i.e. movs8) is not a plain
/// pointer copy, so the analyzer must drop typed state on the destination;
/// the subsequent deref through r4 must therefore record nothing.
#[test]
fn sign_extend_mov_drops_state() {
let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
let btf = Btf::from_bytes(&blob).unwrap();
// off=8 marks a sign-extend MOV (byte width), not a pointer-preserving copy.
let sxt = mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 3, 8, 0);
let insns = vec![
ldx(BPF_SIZE_DW, 3, 1, 8),
sxt,
ldx(BPF_SIZE_DW, 5, 4, 0),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"sign-extend MOV must drop typed state: {map:?}"
);
}
/// Builds a DW-sized STX instruction in ATOMIC mode: `imm` selects the
/// atomic operation (ADD/OR/AND/XOR/XCHG/CMPXCHG, optionally with BPF_FETCH).
fn atomic_stx(dst: u8, src: u8, off: i16, imm: i32) -> BpfInsn {
let opcode = BPF_CLASS_STX | BPF_SIZE_DW | BPF_MODE_ATOMIC;
mk_insn(opcode, dst, src, off, imm)
}
/// Atomic XCHG (imm 0xe0 | FETCH) writes the old memory value back into the
/// src register, so R1's typed state must be clobbered; the following STX of
/// R1 into P's kptr slot must then record nothing.
#[test]
fn atomic_xchg_clobbers_src() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
// 0xe0 = XCHG; FETCH is implied/required for exchange semantics.
atomic_stx(2, 1, 0, 0xe0 | BPF_FETCH),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"XCHG must clobber src R1 typed state: {map:?}"
);
}
/// Atomic CMPXCHG (imm 0xf0 | FETCH) returns the old memory value in R0, so
/// R0's typed state must be clobbered even though R0 is not an operand of the
/// instruction; the subsequent STX of R0 must then record nothing.
#[test]
fn atomic_cmpxchg_clobbers_r0() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
// 0xf0 = CMPXCHG; it implicitly uses and overwrites R0.
atomic_stx(2, 1, 0, 0xf0 | BPF_FETCH),
stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 0,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"CMPXCHG must clobber R0 typed state: {map:?}"
);
}
/// Non-fetch atomic ops (ADD/OR/AND/XOR without BPF_FETCH) do not write back
/// into the src register, so R1's typed state must survive and the STX into
/// P's kptr slot must still record the (P, slot) -> (T, Kernel) finding.
/// Parameterized over all four non-fetch opcodes.
#[test]
fn atomic_non_fetch_preserves_regs() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
// Atomic op selectors carried in the instruction's imm field.
const BPF_ATOMIC_ADD: i32 = 0x00;
const BPF_ATOMIC_OR: i32 = 0x40;
const BPF_ATOMIC_AND: i32 = 0x50;
const BPF_ATOMIC_XOR: i32 = 0xa0;
for imm in [
BPF_ATOMIC_ADD,
BPF_ATOMIC_OR,
BPF_ATOMIC_AND,
BPF_ATOMIC_XOR,
] {
let insns = vec![
atomic_stx(2, 1, 0, imm),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert_eq!(
map.len(),
1,
"imm=0x{imm:02x}: exactly one kptr finding expected, got: {map:?}"
);
assert_eq!(
map.get(&(p_id, slot_off)),
Some(&CastHit {
alloc_size: None,
target_type_id: t_id,
addr_space: AddrSpace::Kernel,
}),
"imm=0x{imm:02x}: non-fetch ATOMIC must preserve src register: {map:?}"
);
}
}
/// An atomic op targeting a stack slot (dst=r10) mutates the spilled value,
/// so the tracked spill must be invalidated: the reload into r3 yields
/// Unknown and the final STX must record nothing.
#[test]
fn atomic_on_stack_invalidates_slot() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
// Spill typed r1 to fp-8, then XCHG the same slot, then reload.
stx(BPF_SIZE_DW, 10, 1, -8),
atomic_stx(10, 2, -8, 0xe0 | BPF_FETCH),
ldx(BPF_SIZE_DW, 3, 10, -8),
stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"ATOMIC on stack slot must invalidate, reload yields Unknown: {map:?}"
);
}
/// LOAD_ACQ (acquire-load encoded via the ATOMIC class, imm =
/// BPF_LOAD_ACQ_IMM) writes the loaded value into the *dst* register, so R1's
/// typed state must be clobbered; the STX through R1 must record nothing.
#[test]
fn atomic_load_acq_clobbers_dst() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
atomic_stx(1, 2, 0, BPF_LOAD_ACQ_IMM),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"LOAD_ACQ must clobber dst R1 typed state: {map:?}"
);
}
/// STORE_REL (release-store via the ATOMIC class) reads registers but writes
/// none, so both the store's src (R1) and an uninvolved register (R2) keep
/// their typed state; both subsequent kptr STXes must be recorded.
/// BTF is built inline because two distinct slots in P are needed.
/// NOTE: type ids are positional (1-based index into `types`): 1=u64, 2=T,
/// 3=T*, 4=P.
#[test]
fn atomic_store_rel_preserves_src_and_dst() {
let slot_off1: u32 = 16;
let slot_off2: u32 = 24;
// String table: offset 0 must be the empty name, hence the initial \0.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_p = push_name(&mut strings, "P");
let n_x = push_name(&mut strings, "x");
let n_slot1 = push_name(&mut strings, "slot1");
let n_slot2 = push_name(&mut strings, "slot2");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynType::Ptr { type_id: 2 },
SynType::Struct {
name_off: n_p,
size: slot_off2 + 8,
members: vec![
SynMember {
name_off: n_slot1,
type_id: 1,
byte_offset: slot_off1,
},
SynMember {
name_off: n_slot2,
type_id: 1,
byte_offset: slot_off2,
},
],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let p_id = 4;
let insns = vec![
// Release-store r1 through r7; must not disturb r1 or r2 state.
atomic_stx(7, 1, 0, BPF_STORE_REL_IMM),
stx(BPF_SIZE_DW, 6, 1, slot_off1 as i16),
stx(BPF_SIZE_DW, 6, 2, slot_off2 as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 2,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert_eq!(
map.get(&(p_id, slot_off1)),
Some(&CastHit {
alloc_size: None,
target_type_id: t_id,
addr_space: AddrSpace::Kernel,
}),
"STORE_REL must preserve src R1 typed state (slot1 missing): {map:?}"
);
assert_eq!(
map.get(&(p_id, slot_off2)),
Some(&CastHit {
alloc_size: None,
target_type_id: t_id,
addr_space: AddrSpace::Kernel,
}),
"STORE_REL must not affect uninvolved R2 (slot2 missing): {map:?}"
);
}
/// STORE_REL through r10 overwrites the stack slot with an untracked value,
/// so the earlier spill of typed r1 must be invalidated: the reload into r3
/// yields Unknown and the final STX records nothing.
#[test]
fn atomic_store_rel_invalidates_stack_slot() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
stx(BPF_SIZE_DW, 10, 1, -8),
// Release-store r2 into the same fp-8 slot: clobbers the spill.
atomic_stx(10, 2, -8, BPF_STORE_REL_IMM),
ldx(BPF_SIZE_DW, 3, 10, -8),
stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"STORE_REL through r10 must invalidate slot, reload Unknown: {map:?}"
);
}
/// ADD|FETCH (op 0x00, so imm is just BPF_FETCH) writes the old memory value
/// back into the src register, so R1's typed state must be clobbered and the
/// following kptr STX records nothing.
#[test]
fn atomic_add_fetch_clobbers_src() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
atomic_stx(2, 1, 0, BPF_FETCH),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"ADD|FETCH must clobber src R1 typed state: {map:?}"
);
}
/// AND|FETCH (op 0x50) writes the old memory value back into the src
/// register, so R1's typed state must be clobbered.
#[test]
fn atomic_and_fetch_clobbers_src() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
atomic_stx(2, 1, 0, 0x50 | BPF_FETCH),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"AND|FETCH must clobber src R1 typed state: {map:?}"
);
}
/// OR|FETCH (op 0x40) writes the old memory value back into the src
/// register, so R1's typed state must be clobbered.
#[test]
fn atomic_or_fetch_clobbers_src() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
atomic_stx(2, 1, 0, 0x40 | BPF_FETCH),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"OR|FETCH must clobber src R1 typed state: {map:?}"
);
}
/// XOR|FETCH (op 0xa0) writes the old memory value back into the src
/// register, so R1's typed state must be clobbered.
#[test]
fn atomic_xor_fetch_clobbers_src() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
atomic_stx(2, 1, 0, 0xa0 | BPF_FETCH),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"XOR|FETCH must clobber src R1 typed state: {map:?}"
);
}
/// A 32-bit (W-size) atomic on a stack slot still mutates the spilled value,
/// so the DW spill must be invalidated — size narrowing does not make the
/// atomic "miss" the tracked slot. Reload then yields Unknown.
#[test]
fn atomic_w_size_invalidates_stack_slot() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
// Built directly (not via atomic_stx) because that helper is DW-only.
let atomic_w = mk_insn(
BPF_CLASS_STX | BPF_SIZE_W | BPF_MODE_ATOMIC,
10,
2,
-8,
0xe0 | BPF_FETCH,
);
let insns = vec![
stx(BPF_SIZE_DW, 10, 1, -8),
atomic_w,
ldx(BPF_SIZE_DW, 3, 10, -8),
stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"W-size ATOMIC on stack slot must invalidate, reload Unknown: {map:?}"
);
}
/// Companion to atomic_cmpxchg_clobbers_r0: here the *src* register (R1) is
/// the typed one. Per the assertion message, the analyzer is expected to
/// clobber it via the generic has_fetch handling; the STX of R1 must then
/// record nothing.
#[test]
fn atomic_cmpxchg_clobbers_src() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
atomic_stx(2, 1, 0, 0xf0 | BPF_FETCH),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"CMPXCHG must clobber src R1 typed state via has_fetch arm: {map:?}"
);
}
/// Two consecutive DW spills to the same stack slot: the second overwrites
/// the first, so a reload must carry T2 (the later spill), not T1.
/// NOTE: type ids are positional (1=u64, 2=T1, 3=T2, 4=P).
#[test]
fn stack_spill_overwrite_uses_latest() {
let slot_off: u32 = 16;
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t1 = push_name(&mut strings, "T1");
let n_t2 = push_name(&mut strings, "T2");
let n_p = push_name(&mut strings, "P");
let n_x = push_name(&mut strings, "x");
let n_slot = push_name(&mut strings, "slot");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t1,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynType::Struct {
name_off: n_t2,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynType::Struct {
name_off: n_p,
size: slot_off + 8,
members: vec![SynMember {
name_off: n_slot,
type_id: 1,
byte_offset: slot_off,
}],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t1_id = 2;
let t2_id = 3;
let p_id = 4;
let insns = vec![
// Spill r1 (T1) then r2 (T2) to the same fp-8 slot; reload into r3.
stx(BPF_SIZE_DW, 10, 1, -8),
stx(BPF_SIZE_DW, 10, 2, -8),
ldx(BPF_SIZE_DW, 3, 10, -8),
stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t1_id,
},
InitialReg {
reg: 2,
struct_type_id: t2_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert_eq!(
map.get(&(p_id, slot_off)),
Some(&CastHit {
alloc_size: None,
target_type_id: t2_id,
addr_space: AddrSpace::Kernel,
}),
"second spill to same slot must win: {map:?}"
);
}
/// Helper calls clobber caller-saved registers but not the stack, so a
/// DW-spilled typed pointer must still be reloadable after `call()` and the
/// subsequent kptr STX must be recorded.
#[test]
fn stack_spill_survives_helper_call() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
stx(BPF_SIZE_DW, 10, 1, -8),
call(),
ldx(BPF_SIZE_DW, 3, 10, -8),
stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert_eq!(
map.get(&(p_id, slot_off)),
Some(&CastHit {
alloc_size: None,
target_type_id: t_id,
addr_space: AddrSpace::Kernel,
}),
"stack-spilled pointer must survive helper call: {map:?}"
);
}
/// A narrower-than-DW store (W) overlapping a tracked DW spill corrupts part
/// of the spilled pointer, so the slot must be invalidated and the reload
/// yields Unknown.
#[test]
fn sub_dw_spill_invalidates() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
stx(BPF_SIZE_DW, 10, 1, -8),
// 32-bit overwrite of the same slot — partial clobber.
stx(BPF_SIZE_W, 10, 1, -8),
ldx(BPF_SIZE_DW, 3, 10, -8),
stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"sub-DW store must invalidate slot, reload Unknown: {map:?}"
);
}
/// A BPF_ST (immediate store) to a tracked stack slot overwrites the spilled
/// pointer with a constant, so the slot must be invalidated even though the
/// store's width matches the spill (DW).
#[test]
fn st_imm_invalidates_stack_slot() {
let slot_off: u32 = 16;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
// BPF_ST has no src register; imm=0 is stored to fp-8.
let st_imm_dw = mk_insn(BPF_CLASS_ST | BPF_MODE_MEM | BPF_SIZE_DW, 10, 0, -8, 0);
let insns = vec![
stx(BPF_SIZE_DW, 10, 1, -8),
st_imm_dw,
ldx(BPF_SIZE_DW, 3, 10, -8),
stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"BPF_ST imm to stack slot must invalidate, reload Unknown: {map:?}"
);
}
/// The same field (P, slot) is first observed via a deref chain (arena-style
/// evidence) and then as the destination of a kptr STX. The two observation
/// kinds conflict, so the finalizer must drop both rather than pick one.
#[test]
fn arena_and_kptr_same_field_drops_both() {
let slot_off: u32 = 8;
let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
let btf = Btf::from_bytes(&blob).unwrap();
let insns = vec![
// Load P.slot then deref it: arena/deref-style evidence for the slot.
ldx(BPF_SIZE_DW, 2, 1, slot_off as i16),
ldx(BPF_SIZE_DW, 3, 2, 0),
// Store typed r6 into the same slot: kptr-style evidence — conflict.
stx(BPF_SIZE_DW, 1, 6, slot_off as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: p_id,
},
InitialReg {
reg: 6,
struct_type_id: t_id,
},
],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"arena+kptr conflict on same slot must drop both: {map:?}"
);
}
/// Storing two *different* kptr target types into the same slot must collapse
/// the slot to a Conflicting state that (a) has a single-target baseline,
/// (b) drops on conflict, and (c) is sticky — re-storing the first target
/// after the conflict must not resurrect the finding.
/// NOTE: type ids are positional (1=u64, 2=T1, 3=T2, 4=P).
#[test]
fn kptr_conflict_two_targets_drops() {
let slot_off: u32 = 16;
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t1 = push_name(&mut strings, "T1");
let n_t2 = push_name(&mut strings, "T2");
let n_p = push_name(&mut strings, "P");
let n_x = push_name(&mut strings, "x");
let n_slot = push_name(&mut strings, "slot");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t1,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynType::Struct {
name_off: n_t2,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynType::Struct {
name_off: n_p,
size: slot_off + 8,
members: vec![SynMember {
name_off: n_slot,
type_id: 1,
byte_offset: slot_off,
}],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t1_id = 2;
let t2_id = 3;
let p_id = 4;
// Shared register seeding for all three sub-scenarios.
let seeds = [
InitialReg {
reg: 1,
struct_type_id: t1_id,
},
InitialReg {
reg: 2,
struct_type_id: t2_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
];
// (a) baseline: single STX of T1 emits one finding.
let insns_single = vec![stx(BPF_SIZE_DW, 6, 1, slot_off as i16), exit()];
let map_single = analyze_casts(&insns_single, &btf, &seeds, &[], &[], &[]);
assert_eq!(
map_single.len(),
1,
"(a) single STX must record exactly one finding: {map_single:?}"
);
assert_eq!(
map_single.get(&(p_id, slot_off)),
Some(&CastHit {
alloc_size: None,
target_type_id: t1_id,
addr_space: AddrSpace::Kernel,
}),
"(a) baseline records (P, slot) -> (T1, Kernel): {map_single:?}"
);
// (b) conflict: T1 then T2 into the same slot — both dropped.
let insns_conflict = vec![
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
stx(BPF_SIZE_DW, 6, 2, slot_off as i16),
exit(),
];
let map_conflict = analyze_casts(&insns_conflict, &btf, &seeds, &[], &[], &[]);
assert!(
map_conflict.is_empty(),
"(b) two distinct kptr targets on same slot must collapse to \
Conflicting and drop: {map_conflict:?}"
);
// (c) stickiness: T1, T2, then T1 again — still dropped.
let insns_three = vec![
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
stx(BPF_SIZE_DW, 6, 2, slot_off as i16),
stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
exit(),
];
let map_three = analyze_casts(&insns_three, &btf, &seeds, &[], &[], &[]);
assert!(
map_three.is_empty(),
"(c) Conflicting state must be sticky across same-target \
restore — third STX of T1 must not resurrect: {map_three:?}"
);
}
/// A malformed LDX with an out-of-range destination register (11 > r10) must
/// be tolerated — no panic / no out-of-bounds indexing — and produce no
/// findings.
#[test]
fn oob_dst_reg_does_not_panic() {
let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
let btf = Btf::from_bytes(&blob).unwrap();
// dst=11 is not a valid BPF register.
let bad = BpfInsn::new(BPF_CLASS_LDX | BPF_SIZE_DW | BPF_MODE_MEM, 11, 1, 8, 0);
let insns = vec![bad, exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(map.is_empty(), "OOB dst must not panic, map empty: {map:?}");
}
/// A malformed LDX with an out-of-range source register (15) must be
/// tolerated — no panic — and produce no findings.
#[test]
fn oob_src_reg_does_not_panic() {
let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
let btf = Btf::from_bytes(&blob).unwrap();
// src=15 is not a valid BPF register.
let bad = BpfInsn::new(BPF_CLASS_LDX | BPF_SIZE_DW | BPF_MODE_MEM, 2, 15, 8, 0);
let insns = vec![bad, exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(map.is_empty(), "OOB src must not panic, map empty: {map:?}");
}
/// Storing a struct pointer into a slot of *itself* (src reg == dst reg) is
/// not a meaningful kptr observation and must be rejected.
/// NOTE: type ids are positional (1=u64, 2=P).
#[test]
fn self_store_rejected() {
let slot_off: u32 = 8;
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_p = push_name(&mut strings, "P");
let n_slot = push_name(&mut strings, "slot");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_p,
size: slot_off + 8,
members: vec![SynMember {
name_off: n_slot,
type_id: 1,
byte_offset: slot_off,
}],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let p_id = 2;
// r1 stored into *(r1 + slot): self-store.
let insns = vec![stx(BPF_SIZE_DW, 1, 1, slot_off as i16), exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: p_id,
}],
&[],
&[],
&[],
);
assert!(map.is_empty(), "self-store must be rejected: {map:?}");
}
/// FUNC_PROTO parameter seeding must stop at the variadic sentinel (a param
/// with name_off=0 and type_id=0): R1/R2 get seeded from the first two
/// pointer params, but the param *after* the sentinel must not seed R3 —
/// even though it has a valid pointer type.
/// NOTE: type ids are positional (1=u64, 2=T, 3=T*, 4=P, 5=P*, 6=proto).
#[test]
fn variadic_param_breaks_seeding() {
let slot_off1: u32 = 16;
let slot_off2: u32 = 24;
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_p = push_name(&mut strings, "P");
let n_x = push_name(&mut strings, "x");
let n_slot1 = push_name(&mut strings, "slot1");
let n_slot2 = push_name(&mut strings, "slot2");
let n_arg_t = push_name(&mut strings, "task");
let n_arg_p = push_name(&mut strings, "parent");
let n_arg_after = push_name(&mut strings, "after_variadic");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynType::Ptr { type_id: 2 }, SynType::Struct {
name_off: n_p,
size: slot_off2 + 8,
members: vec![
SynMember {
name_off: n_slot1,
type_id: 1,
byte_offset: slot_off1,
},
SynMember {
name_off: n_slot2,
type_id: 1,
byte_offset: slot_off2,
},
],
},
SynType::Ptr { type_id: 4 }, SynType::FuncProto {
return_type_id: 0,
params: vec![
SynParam {
name_off: n_arg_t,
type_id: 3,
},
SynParam {
name_off: n_arg_p,
type_id: 5,
},
// Variadic sentinel: both name and type are 0.
SynParam {
name_off: 0,
type_id: 0,
},
// Valid param *after* the sentinel — must NOT seed R3.
SynParam {
name_off: n_arg_after,
type_id: 3,
},
],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let p_id = 4;
let proto_id = 6;
let insns = vec![
// R1 (seeded T*) stored into P.slot1; R3 (unseeded) into P.slot2.
stx(BPF_SIZE_DW, 2, 1, slot_off1 as i16),
stx(BPF_SIZE_DW, 2, 3, slot_off2 as i16),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[],
&[FuncEntry {
insn_offset: 0,
func_proto_id: proto_id,
}],
&[],
&[],
);
assert_eq!(
map.get(&(p_id, slot_off1)),
Some(&CastHit {
alloc_size: None,
target_type_id: t_id,
addr_space: AddrSpace::Kernel,
}),
"non-variadic params must seed R1 and R2: {map:?}"
);
assert!(
!map.contains_key(&(p_id, slot_off2)),
"variadic sentinel must terminate scan, R3 must stay Unknown: {map:?}"
);
}
/// A FuncEntry at instruction 0 must clear *all* previously seeded register
/// state before applying its own (single-param) seeding: R3's InitialReg
/// seeding must not survive into the function body, so the STX of R3 records
/// nothing.
/// NOTE: type ids are positional (1=u64, 2=T, 3=T*, 4=P, 5=proto).
#[test]
fn func_entry_clears_all_regs() {
let slot_off: u32 = 16;
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_p = push_name(&mut strings, "P");
let n_x = push_name(&mut strings, "x");
let n_slot = push_name(&mut strings, "slot");
let n_arg = push_name(&mut strings, "arg");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynType::Ptr { type_id: 2 },
SynType::Struct {
name_off: n_p,
size: slot_off + 8,
members: vec![SynMember {
name_off: n_slot,
type_id: 1,
byte_offset: slot_off,
}],
},
// Proto with a single T* param — seeds only R1.
SynType::FuncProto {
return_type_id: 0,
params: vec![SynParam {
name_off: n_arg,
type_id: 3,
}],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let p_id = 4;
let proto_id = 5;
let insns = vec![stx(BPF_SIZE_DW, 6, 3, slot_off as i16), exit()];
let map = analyze_casts(
&insns,
&btf,
&[
// These InitialRegs must be wiped by the FuncEntry pre-clear.
InitialReg {
reg: 3,
struct_type_id: t_id,
},
InitialReg {
reg: 6,
struct_type_id: p_id,
},
],
&[FuncEntry {
insn_offset: 0,
func_proto_id: proto_id,
}],
&[],
&[],
);
assert!(
map.is_empty(),
"FuncEntry pre-clear must drop R3 typed state: {map:?}"
);
}
/// A BPF_PROBE_MEM-mode load (mode 0x20, used for unchecked kernel-memory
/// reads) must not propagate typed state: the dst becomes Unknown, so the
/// follow-up deref records nothing.
#[test]
fn probe_mem_load_treated_as_unknown() {
let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
let btf = Btf::from_bytes(&blob).unwrap();
const BPF_MODE_PROBE_MEM: u8 = 0x20;
let probe_load = mk_insn(BPF_CLASS_LDX | BPF_SIZE_DW | BPF_MODE_PROBE_MEM, 2, 1, 8, 0);
let insns = vec![probe_load, ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"BPF_PROBE_MEM load must mark dst Unknown: {map:?}"
);
}
/// Finalization: the same field (T, 8) is both arena-confirmed (via an
/// addr_space_cast on its loaded value) and the destination of a kptr STX of
/// a Q pointer. The two classifications conflict, so the finalizer must drop
/// the field entirely.
/// NOTE: type ids are positional (1=u64, 2=T, 3=T*, 4=Q).
#[test]
fn finalize_arena_confirmed_conflicts_with_kptr() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_q = push_name(&mut strings, "Q");
let n_f = push_name(&mut strings, "f");
let n_x = push_name(&mut strings, "x");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t,
size: 16,
members: vec![SynMember {
name_off: n_f,
type_id: 1,
byte_offset: 8,
}],
},
SynType::Ptr { type_id: 2 },
SynType::Struct {
name_off: n_q,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let q_id = 4;
// addr_space_cast encoding (MOV reg-form, off=1, imm=1) on loaded r2.
let cast = mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 2, 1, 1);
let insns = vec![
// Arena-style evidence for (T, 8)...
ldx(BPF_SIZE_DW, 2, 1, 8),
cast,
// ...then kptr-style evidence for the same slot: conflict.
stx(BPF_SIZE_DW, 1, 3, 8),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[
InitialReg {
reg: 1,
struct_type_id: t_id,
},
InitialReg {
reg: 3,
struct_type_id: q_id,
},
],
&[],
&[],
&[],
);
assert!(
!map.contains_key(&(t_id, 8)),
"arena_confirmed + kptr conflict on (T, 8) must drop both: {map:?}"
);
assert!(map.is_empty(), "no other entries expected: {map:?}");
}
/// A field that is loaded but whose loaded value is never dereferenced (an
/// empty access set) gives the finalizer nothing to match candidate layouts
/// against, so no finding may be emitted.
#[test]
fn finalize_empty_access_set_does_not_emit() {
let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
let btf = Btf::from_bytes(&blob).unwrap();
// Load (T, 8) into r2 but never deref r2.
let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(
!map.contains_key(&(t_id, 8)),
"empty access set must not emit: {map:?}"
);
assert!(map.is_empty(), "no other entries expected: {map:?}");
}
/// T and Q have identical layouts, so a deref pattern through (T, 0) matches
/// both as candidates. The source type itself (T) must be removed from the
/// candidate set, leaving Q as the sole candidate, which is then emitted as
/// an Arena finding.
/// NOTE: type ids are positional (1=u64, 2=T, 3=Q).
#[test]
fn finalize_source_in_candidates_with_others_emits_other() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_q = push_name(&mut strings, "Q");
let n_a = push_name(&mut strings, "a");
let n_b = push_name(&mut strings, "b");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
// T and Q: same size, same member offsets — layout-identical.
SynType::Struct {
name_off: n_t,
size: 16,
members: vec![
SynMember {
name_off: n_a,
type_id: 1,
byte_offset: 0,
},
SynMember {
name_off: n_b,
type_id: 1,
byte_offset: 8,
},
],
},
SynType::Struct {
name_off: n_q,
size: 16,
members: vec![
SynMember {
name_off: n_a,
type_id: 1,
byte_offset: 0,
},
SynMember {
name_off: n_b,
type_id: 1,
byte_offset: 8,
},
],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let insns = vec![
// Load (T, 0), arena-confirm it, then deref both of its fields.
ldx(BPF_SIZE_DW, 2, 1, 0),
addr_space_cast(2, 2, 1),
ldx(BPF_SIZE_DW, 3, 2, 0),
ldx(BPF_SIZE_DW, 4, 2, 8),
exit(),
];
let q_id = 3;
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert_eq!(
map.get(&(t_id, 0)),
Some(&CastHit {
alloc_size: None,
target_type_id: q_id,
addr_space: AddrSpace::Arena,
}),
"source removed, sole non-source candidate Q emitted: {map:?}"
);
}
/// When the only layout candidate for a deref pattern is the source type
/// itself (here T is the sole struct whose layout matches), removing the
/// source leaves an empty candidate set and the finding must be dropped.
/// NOTE: type ids are positional (1=u64, 2=T).
#[test]
fn finalize_only_source_candidate_drops() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_f = push_name(&mut strings, "f");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t,
size: 16,
members: vec![SynMember {
name_off: n_f,
type_id: 1,
byte_offset: 8,
}],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
// Load (T, 8) and deref at +8 — only T itself matches that layout.
let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 8), exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"candidate set containing only the source must drop: {map:?}"
);
}
/// The finalizer's candidate search apparently probes type ids up to the
/// maximum id seen plus some slack (capped by MAX_BTF_ID_PROBE, per the
/// assertion message). Pad the BTF with 200 filler Ptr types so the matching
/// candidate Q lands at id 203 — well past the ids referenced by the program
/// — and verify it is still found.
#[test]
fn finalize_max_seen_type_id_slack_finds_distant_candidate() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_q = push_name(&mut strings, "Q");
let n_f = push_name(&mut strings, "f");
let n_x = push_name(&mut strings, "x");
let mut types: Vec<SynType> = Vec::new();
types.push(SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
});
types.push(SynType::Struct {
name_off: n_t,
size: 16,
members: vec![SynMember {
name_off: n_f,
type_id: 1,
byte_offset: 8,
}],
});
// 200 filler types (ids 3..=202) push Q out to id 203.
for _ in 0..200 {
types.push(SynType::Ptr { type_id: 1 });
}
types.push(SynType::Struct {
name_off: n_q,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
});
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let q_id = 203;
let insns = vec![
ldx(BPF_SIZE_DW, 2, 1, 8),
addr_space_cast(2, 2, 1),
ldx(BPF_SIZE_DW, 3, 2, 0),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert_eq!(
map.get(&(t_id, 8)),
Some(&CastHit {
alloc_size: None,
target_type_id: q_id,
addr_space: AddrSpace::Arena,
}),
"slack must carry search well past max_seen, capped within \
MAX_BTF_ID_PROBE: {map:?}"
);
}
/// A bitfield member sitting exactly at the loaded offset (byte 8, i.e. bit
/// 64, with a 32-bit bitfield width) is not a pointer-sized scalar field, so
/// member lookup must skip it and no cast may be seeded.
#[test]
fn struct_member_at_skips_bitfield_at_target_offset() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_f = push_name(&mut strings, "f");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::StructBitfields {
name_off: n_t,
size: 16,
members: vec![SynMemberBits {
name_off: n_f,
type_id: 1,
// Byte offset 8 expressed in bits; nonzero bitfield size marks it a bitfield.
bit_offset: 8 * 8, bitfield_size_bits: 32, }],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"bitfield at target offset must not seed cast: {map:?}"
);
}
/// A member whose bit offset (65) is not a multiple of 8 cannot correspond to
/// any byte-addressed load, so member lookup must skip it and no cast may be
/// seeded.
#[test]
fn struct_member_at_skips_non_byte_aligned_member() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_f = push_name(&mut strings, "f");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::StructBitfields {
name_off: n_t,
size: 24,
members: vec![SynMemberBits {
name_off: n_f,
type_id: 1,
// bit 65 = byte 8 + 1 bit: deliberately misaligned, zero bitfield width.
bit_offset: 65, bitfield_size_bits: 0, }],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert!(
map.is_empty(),
"non-byte-aligned member must not seed cast: {map:?}"
);
}
/// Candidate struct U contains members whose types have no computable size —
/// a BTF forward declaration, a FUNC, and void (type_id 0) — plus one normal
/// u64 member. Member-size computation must skip the unsupported terminals
/// without crashing, and the deref at offset 24 (the u64 member) must still
/// match U as the Arena candidate for (T, 8).
/// NOTE: type ids are positional (1=u64, 2=T, 3=fwd, 4=proto, 5=func, 6=U).
#[test]
fn member_size_bytes_unsupported_terminals_skipped() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_u = push_name(&mut strings, "U");
let n_fwd_target = push_name(&mut strings, "fwd_struct");
let n_func = push_name(&mut strings, "fn");
let n_fwd_ref = push_name(&mut strings, "fwd_ref");
let n_func_ref = push_name(&mut strings, "func_ref");
let n_void_ref = push_name(&mut strings, "void_ref");
let n_v = push_name(&mut strings, "v");
let n_f = push_name(&mut strings, "f");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t,
size: 16,
members: vec![SynMember {
name_off: n_f,
type_id: 1,
byte_offset: 8,
}],
},
SynType::Fwd {
name_off: n_fwd_target,
kind_flag: 0,
},
SynType::FuncProto {
return_type_id: 0,
params: vec![],
},
SynType::Func {
name_off: n_func,
type_id: 4,
linkage: 1,
},
SynType::Struct {
name_off: n_u,
size: 32,
members: vec![
// Member of FWD type — size unknowable.
SynMember {
name_off: n_fwd_ref,
type_id: 3, byte_offset: 0,
},
// Member of FUNC type — not a data type.
SynMember {
name_off: n_func_ref,
type_id: 5, byte_offset: 8,
},
// Member of void (type_id 0) — sizeless.
SynMember {
name_off: n_void_ref,
type_id: 0, byte_offset: 16,
},
// The one well-formed member the deref at +24 should hit.
SynMember {
name_off: n_v,
type_id: 1, byte_offset: 24,
},
],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let u_id = 6;
let insns = vec![
ldx(BPF_SIZE_DW, 2, 1, 8),
addr_space_cast(2, 2, 1),
ldx(BPF_SIZE_DW, 3, 2, 24),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert_eq!(
map.get(&(t_id, 8)),
Some(&CastHit {
alloc_size: None,
target_type_id: u_id,
addr_space: AddrSpace::Arena,
}),
"unsupported terminals must be skipped without crashing: {map:?}"
);
}
/// Two layout candidates exist for a deref at offset 0: Q1, whose member at
/// offset 0 is a 32-bit bitfield, and Q2, whose member is a plain u64. The
/// layout index must exclude the bitfield candidate, so Q2 is the unique
/// match emitted for (T, 8).
/// NOTE: type ids are positional (1=u64, 2=T, 3=Q1, 4=Q2).
#[test]
fn build_layout_index_skips_bitfields_in_candidates() {
// String table: offset 0 is the empty name.
let mut strings: Vec<u8> = vec![0];
let n_u64 = push_name(&mut strings, "u64");
let n_t = push_name(&mut strings, "T");
let n_q1 = push_name(&mut strings, "Q1");
let n_q2 = push_name(&mut strings, "Q2");
let n_f = push_name(&mut strings, "f");
let n_a = push_name(&mut strings, "a");
let types = vec![
SynType::Int {
name_off: n_u64,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynType::Struct {
name_off: n_t,
size: 16,
members: vec![SynMember {
name_off: n_f,
type_id: 1,
byte_offset: 8,
}],
},
// Q1: bitfield member — must be skipped by the layout index.
SynType::StructBitfields {
name_off: n_q1,
size: 8,
members: vec![SynMemberBits {
name_off: n_a,
type_id: 1,
bit_offset: 0,
bitfield_size_bits: 32,
}],
},
// Q2: plain u64 member — the expected candidate.
SynType::Struct {
name_off: n_q2,
size: 8,
members: vec![SynMember {
name_off: n_a,
type_id: 1,
byte_offset: 0,
}],
},
];
let blob = build_btf(&types, &strings);
let btf = Btf::from_bytes(&blob).unwrap();
let t_id = 2;
let q2_id = 4;
let insns = vec![
ldx(BPF_SIZE_DW, 2, 1, 8),
addr_space_cast(2, 2, 1),
ldx(BPF_SIZE_DW, 3, 2, 0),
exit(),
];
let map = analyze_casts(
&insns,
&btf,
&[InitialReg {
reg: 1,
struct_type_id: t_id,
}],
&[],
&[],
&[],
);
assert_eq!(
map.get(&(t_id, 8)),
Some(&CastHit {
alloc_size: None,
target_type_id: q2_id,
addr_space: AddrSpace::Arena,
}),
"bitfield candidate must be skipped: {map:?}"
);
}
// Unions must behave like structs in both roles: as the *parent* holding
// a kptr slot (union P), and as the *target* of an arena cast
// (TargetU). The program exercises both paths in a single run.
#[test]
fn union_works_like_struct_for_layout_and_member_lookup() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_p = push_name(&mut strings, "P");
    let n_su = push_name(&mut strings, "SourceU");
    let n_tu = push_name(&mut strings, "TargetU");
    let n_x = push_name(&mut strings, "x");
    let n_slot = push_name(&mut strings, "slot");
    let n_f = push_name(&mut strings, "f");
    let n_a = push_name(&mut strings, "a");
    // BTF ids: 1=u64, 2=T, 3=T*, 4=union P, 5=SourceU, 6=TargetU.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        SynType::Ptr { type_id: 2 },
        // Union parent whose slot member lives at byte 16.
        SynType::Union {
            name_off: n_p,
            size: 24,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: 16,
            }],
        },
        SynType::Struct {
            name_off: n_su,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // Union cast candidate for the arena path.
        SynType::Union {
            name_off: n_tu,
            size: 8,
            members: vec![SynMember {
                name_off: n_a,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let p_id = 4;
    let source_u_id = 5;
    let target_u_id = 6;
    // insn 0: *(r1[P] + 16) = r6[T*]  -> kptr hit through union parent.
    // insns 1-3: load SourceU.f, arena-cast, dereference -> arena hit.
    let insns = vec![
        stx(BPF_SIZE_DW, 1, 6, 16),
        ldx(BPF_SIZE_DW, 3, 2, 8),
        addr_space_cast(3, 3, 1),
        ldx(BPF_SIZE_DW, 4, 3, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: p_id,
            },
            InitialReg {
                reg: 6,
                struct_type_id: t_id,
            },
            InitialReg {
                reg: 2,
                struct_type_id: source_u_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, 16)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kptr through union parent must record: {map:?}"
    );
    assert_eq!(
        map.get(&(source_u_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: target_u_id,
            addr_space: AddrSpace::Arena,
        }),
        "union target must be a layout candidate: {map:?}"
    );
}
// Sparse target space: the layout index must stop probing once its
// consecutive-miss cap is reached, without panicking, while the one
// valid candidate that precedes the cap is still found.
#[test]
fn build_layout_index_consecutive_fail_cap_short_circuits() {
    let (raw, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    // Load the u64 field, cast into the arena, then dereference it.
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "valid candidate must be found before fail cap; sparse \
         BTF must not panic: {map:?}"
    );
}
// A kind_flag=1 struct encodes members with (bitfield_size, bit_offset)
// pairs; a member whose bitfield size is 0 is an ordinary field. Q's
// member `b` is a real bitfield, but `a` (size 0, bit offset 0) is not,
// so Q must still qualify as a layout candidate.
#[test]
fn kind_flag_struct_includes_non_bitfield_members() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q = push_name(&mut strings, "Q");
    let n_src = push_name(&mut strings, "src");
    let n_a = push_name(&mut strings, "a");
    let n_b = push_name(&mut strings, "b");
    // BTF ids: 1=u64, 2=T, 3=Q.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_src,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        SynType::StructBitfields {
            name_off: n_q,
            size: 16,
            members: vec![
                // bitfield_size_bits == 0 means "not a bitfield".
                SynMemberBits {
                    name_off: n_a,
                    type_id: 1,
                    bit_offset: 0,
                    bitfield_size_bits: 0,
                },
                SynMemberBits {
                    name_off: n_b,
                    type_id: 1,
                    bit_offset: 64,
                    bitfield_size_bits: 32,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let q_id = 3;
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "non-bitfield member of kind_flag=1 struct must be a \
         layout candidate: {map:?}"
    );
}
// Stack slots live below r10, so a store at a non-negative r10 offset
// must not be tracked; the subsequent reload yields Unknown and the
// final kptr-style store records nothing.
#[test]
fn stack_off_non_negative_through_r10_invalidates() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    // Spill to [r10+0] (invalid: offset not negative), reload, store.
    let prog = [
        stx(BPF_SIZE_DW, 10, 1, 0),
        ldx(BPF_SIZE_DW, 3, 10, 0),
        stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "non-negative r10 store must not save state, reload \
         returns Unknown: {map:?}"
    );
}
// A negative load offset through a typed (non-r10) pointer is outside
// the struct, so tracking must be dropped and nothing recorded.
#[test]
fn negative_off_in_non_r10_context_drops() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r2 = *(r1 - 8): negative member offset through Pointer{T}.
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, -8),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "negative offset through Pointer{{T}} must drop, no \
         pattern recorded: {map:?}"
    );
}
// Spilling the same Pointer{T} twice into the same stack slot must not
// degrade the slot's state: the reload still carries Pointer{T} and the
// recorded kptr stays a single hit.
#[test]
fn stack_spill_same_target_stays_single() {
    let slot_off: u32 = 16;
    let (blob, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&blob).unwrap();
    // The duplicated spill at [r10-8] is intentional: the second store
    // writes an identical value into an already-occupied slot.
    let insns = vec![
        stx(BPF_SIZE_DW, 10, 1, -8),
        stx(BPF_SIZE_DW, 10, 1, -8),
        ldx(BPF_SIZE_DW, 3, 10, -8),
        stx(BPF_SIZE_DW, 6, 3, slot_off as i16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: t_id,
            },
            InitialReg {
                reg: 6,
                struct_type_id: p_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "spill of identical Pointer{{T}} must reload as Pointer{{T}}, \
         kptr stays Single: {map:?}"
    );
}
// A kfunc whose prototype returns a pointer to an *int* (not a struct)
// must not seed R0 with a typed pointer; the following store into P's
// slot then records nothing.
#[test]
fn kfunc_call_returning_int_ptr_leaves_r0_unknown() {
    let slot_off: u32 = 16;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_p = push_name(&mut strings, "P");
    let n_slot = push_name(&mut strings, "slot");
    let n_kfunc = push_name(&mut strings, "bpf_returns_int_ptr");
    // BTF ids: 1=u64, 2=P, 3=u64* (int pointer), 4=proto, 5=func.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_p,
            size: slot_off + 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: slot_off,
            }],
        },
        // Return type is pointer-to-int, not pointer-to-struct.
        SynType::Ptr { type_id: 1 },
        SynType::FuncProto {
            return_type_id: 3,
            params: vec![],
        },
        SynType::Func {
            name_off: n_kfunc,
            type_id: 4,
            linkage: 1,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let p_id = 2;
    let kfunc_id = 5;
    let insns = vec![
        kfunc_call(kfunc_id),
        stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "kfunc returning int* must leave R0 Unknown: {map:?}"
    );
}
// A kfunc whose prototype returns void (return_type_id 0) must leave R0
// untyped; storing R0 into P's slot then records nothing.
#[test]
fn kfunc_call_void_return_leaves_r0_unknown() {
    let slot_off: u32 = 16;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_p = push_name(&mut strings, "P");
    let n_slot = push_name(&mut strings, "slot");
    let n_kfunc = push_name(&mut strings, "bpf_void_return");
    // BTF ids: 1=u64, 2=P, 3=proto (void return), 4=func.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_p,
            size: slot_off + 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: slot_off,
            }],
        },
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![],
        },
        SynType::Func {
            name_off: n_kfunc,
            type_id: 3,
            linkage: 1,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let p_id = 2;
    let kfunc_id = 4;
    let insns = vec![
        kfunc_call(kfunc_id),
        stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "kfunc with void return must leave R0 Unknown: {map:?}"
    );
}
// The call's BTF id references the FUNC_PROTO directly (no wrapping
// FUNC). Resolution must still seed R0 from the prototype's return type
// (T*), so the store into P's slot records a kernel kptr hit.
#[test]
fn kfunc_call_with_funcproto_id_directly() {
    let slot_off: u32 = 16;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_p = push_name(&mut strings, "P");
    let n_x = push_name(&mut strings, "x");
    let n_slot = push_name(&mut strings, "slot");
    // BTF ids: 1=u64, 2=T, 3=T*, 4=P, 5=proto returning T*.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_p,
            size: slot_off + 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: slot_off,
            }],
        },
        SynType::FuncProto {
            return_type_id: 3,
            params: vec![],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let p_id = 4;
    let proto_id = 5;
    let insns = vec![
        kfunc_call(proto_id),
        stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kfunc with direct FuncProto id must seed R0 from return \
         type: {map:?}"
    );
}
// Writing the frame pointer is illegal in BPF, so the analyzer must not
// let r10 inherit r2's typed state; reading it back yields Unknown.
#[test]
fn mov_to_r10_rejected_keeps_r10_unknown() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        mov_x(10, 2),
        mov_x(3, 10),
        ldx(BPF_SIZE_DW, 4, 3, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "MOV r10, r2 must be rejected so r10 stays Unknown: {map:?}"
    );
}
// Loading into the frame pointer is illegal; r10 must stay Unknown so
// nothing typed flows through the later copy and dereference.
#[test]
fn ldx_into_r10_rejected_keeps_r10_unknown() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        ldx(BPF_SIZE_DW, 10, 1, 8),
        mov_x(3, 10),
        ldx(BPF_SIZE_DW, 4, 3, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "LDX r10, [r1+8] must be rejected so r10 stays Unknown: {map:?}"
    );
}
// Register numbers above 10 are invalid; an STX using them must be
// ignored gracefully rather than indexing out of bounds.
#[test]
fn oob_stx_reg_does_not_panic() {
    let (raw, _t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        BpfInsn::new(BPF_CLASS_STX | BPF_SIZE_DW | BPF_MODE_MEM, 15, 15, 0, 0),
        exit(),
    ];
    let map = analyze_casts(&prog, &btf, &[], &[], &[], &[]);
    assert!(
        map.is_empty(),
        "OOB STX (dst=15, src=15) must not panic: {map:?}"
    );
}
// A 64-bit register MOV with dst=15 (out of range) must be skipped
// without panicking.
#[test]
fn oob_mov_reg_does_not_panic() {
    let (raw, _t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        BpfInsn::new(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 15, 0, 0, 0),
        exit(),
    ];
    let map = analyze_casts(&prog, &btf, &[], &[], &[], &[]);
    assert!(map.is_empty(), "OOB MOV (dst=15) must not panic: {map:?}");
}
// An atomic STX with both registers out of range (and an unknown atomic
// op in imm) must not panic.
#[test]
fn oob_atomic_reg_does_not_panic() {
    let (raw, _t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let atomic = BpfInsn::new(
        BPF_CLASS_STX | BPF_SIZE_DW | BPF_MODE_ATOMIC,
        15,
        15,
        0,
        BPF_FETCH | 0xe0,
    );
    let map = analyze_casts(&[atomic, exit()], &btf, &[], &[], &[], &[]);
    assert!(
        map.is_empty(),
        "OOB ATOMIC (dst=15, src=15) must not panic: {map:?}"
    );
}
// Copying from an untyped register must clobber the destination's typed
// state rather than preserve it.
#[test]
fn mov_x_unknown_source_overwrites_typed_dst() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r2 picks up typed state from the load, then r3 (Unknown) wipes it.
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        mov_x(2, 3),
        ldx(BPF_SIZE_DW, 4, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "MOV with Unknown source must overwrite typed dst: {map:?}"
    );
}
// mov r2, r2 between the cast and the dereference must be a no-op for
// the tracked state.
#[test]
fn mov_x_self_copy_preserves_state() {
    let (raw, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        mov_x(2, 2),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "MOV self-copy must preserve LoadedU64Field state: {map:?}"
    );
}
// A 32-bit (BPF_CLASS_ALU) register move truncates the value, so the
// destination must not carry the source's typed state.
#[test]
fn mov32_destroys_typed_state() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        mk_insn(BPF_CLASS_ALU | BPF_OP_MOV | BPF_SRC_X, 4, 2, 0, 0),
        ldx(BPF_SIZE_DW, 5, 4, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(map.is_empty(), "32-bit MOV must drop typed state: {map:?}");
}
// ALU64 ADD in register form on a typed pointer must discard its type:
// the subsequent kptr-style store then records nothing.
#[test]
fn alu64_add_x_destroys_typed_pointer() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r1 += r3, then *(r6 + slot) = r1.
    let prog = [
        mk_insn(BPF_CLASS_ALU64 | (bs::BPF_ADD as u8) | BPF_SRC_X, 1, 3, 0, 0),
        stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "ALU64 ADD X must destroy typed pointer: {map:?}"
    );
}
// ALU64 SUB in register form on a typed pointer must discard its type.
#[test]
fn alu64_sub_x_destroys_typed_pointer() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r1 -= r3, then *(r6 + slot) = r1.
    let prog = [
        mk_insn(BPF_CLASS_ALU64 | (bs::BPF_SUB as u8) | BPF_SRC_X, 1, 3, 0, 0),
        stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "ALU64 SUB X must destroy typed pointer: {map:?}"
    );
}
// ALU64 AND in register form on a typed pointer must discard its type.
#[test]
fn alu64_and_x_destroys_typed_pointer() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r1 &= r3, then *(r6 + slot) = r1.
    let prog = [
        mk_insn(BPF_CLASS_ALU64 | (bs::BPF_AND as u8) | BPF_SRC_X, 1, 3, 0, 0),
        stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "ALU64 AND X must destroy typed pointer: {map:?}"
    );
}
// ALU64 ADD with an immediate must also discard the pointer type -- the
// analyzer does not model pointer arithmetic.
#[test]
fn alu64_add_k_destroys_typed_pointer() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r1 += 8 (immediate form), then *(r6 + slot) = r1.
    let prog = [
        mk_insn(BPF_CLASS_ALU64 | (bs::BPF_ADD as u8), 1, 0, 0, 8),
        stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "ALU64 ADD K must destroy typed pointer: {map:?}"
    );
}
// Loading an immediate into a typed register wipes its pointer state
// before the kptr-style store.
#[test]
fn mov_k_destroys_typed_pointer() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r1 = 42, then *(r6 + slot) = r1.
    let prog = [
        mov_k(1, 42),
        stx(BPF_SIZE_DW, 6, 1, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(map.is_empty(), "mov_k must destroy typed pointer: {map:?}");
}
// A MOV with off=1 is an addr_space_cast; imm=2 is a reserved direction,
// so the destination register must be dropped to Unknown.
#[test]
fn addr_space_cast_unknown_imm_drops_dst() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        ldx(BPF_SIZE_DW, 3, 1, 8),
        mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 3, 1, 2),
        ldx(BPF_SIZE_DW, 5, 4, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "BPF_ADDR_SPACE_CAST with reserved imm must drop dst: {map:?}"
    );
}
// addr_space_cast (off=1) with imm=1 applied to a typed pointer must
// carry the pointer state into the destination register.
#[test]
fn addr_space_cast_arena_imm1_on_pointer_propagates() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    // r4 = addr_space_cast(r3, imm=1), then *(r6 + slot) = r4.
    let prog = [
        mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 3, 1, 1),
        stx(BPF_SIZE_DW, 6, 4, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 3,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "ADDR_SPACE_CAST imm=1 on Pointer{{T}} must propagate state: {map:?}"
    );
}
// addr_space_cast with imm=0x10000 must keep the source pointer's type
// alive so the following store still yields exactly one kptr hit.
#[test]
fn addr_space_cast_kernel_arena_preserves_pointer_source() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 3, 1, 0x10000),
        stx(BPF_SIZE_DW, 6, 4, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 3,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert_eq!(
        map.len(),
        1,
        "Pointer through addr_space_cast should produce a kptr CastHit: {map:?}"
    );
}
// The legacy packet-load BPF_LD|BPF_ABS implicitly overwrites r0, so
// any typed state seeded into r0 must be cleared by it.
#[test]
fn bpf_ld_abs_clears_r0() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        mk_insn(BPF_CLASS_LD | BPF_SIZE_W | (bs::BPF_ABS as u8), 0, 0, 0, 0),
        stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 0,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "BPF_LD_ABS must clear r0 typed state: {map:?}"
    );
}
// The legacy packet-load BPF_LD|BPF_IND also implicitly overwrites r0.
#[test]
fn bpf_ld_ind_clears_r0() {
    let slot_off: u32 = 16;
    let (raw, t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        mk_insn(BPF_CLASS_LD | BPF_SIZE_W | (bs::BPF_IND as u8), 0, 0, 0, 0),
        stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 0,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 6,
            struct_type_id: p_id,
        },
    ];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "BPF_LD_IND must clear r0 typed state: {map:?}"
    );
}
// Minimal program: a lone EXIT must analyze cleanly to an empty map.
#[test]
fn single_exit_does_not_panic() {
    let (raw, _t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let map = analyze_casts(&[exit()], &btf, &[], &[], &[], &[]);
    assert!(map.is_empty(), "single EXIT must yield empty map: {map:?}");
}
// Program made only of jumps, including a backwards JA, must terminate
// the analysis without panicking or recording anything.
#[test]
fn jumps_only_program_does_not_panic() {
    let (raw, _t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    // JEQ forward, JA forward, JA backwards (off = -2).
    let prog = [
        mk_insn(BPF_CLASS_JMP | 0x10, 1, 0, 1, 0),
        mk_insn(BPF_CLASS_JMP, 0, 0, 1, 0),
        mk_insn(BPF_CLASS_JMP, 0, 0, -2, 0),
        exit(),
    ];
    let map = analyze_casts(&prog, &btf, &[], &[], &[], &[]);
    assert!(
        map.is_empty(),
        "all-jumps program must yield empty map: {map:?}"
    );
}
/// Build the two-slot BPF_LD_IMM64 pseudo-instruction pair loading `imm`
/// into register `dst`; the second slot carries the upper 32 bits (zero
/// here, so the loaded value is the sign-less low word).
fn ld_imm64(dst: u8, imm: i32) -> [BpfInsn; 2] {
    [
        mk_insn(BPF_CLASS_LD | BPF_SIZE_DW | BPF_MODE_IMM, dst, 0, 0, imm),
        mk_insn(0, 0, 0, 0, 0),
    ]
}
// Build a BTF blob modelling a .bss DATASEC with a single 8-byte kptr
// variable plus an acquire kfunc returning task_struct*.
// Returns (blob, datasec_id=5, t_id=2, var_off=0, kfunc_id=7); ids are
// 1-based positions in `types`.
fn btf_bss_with_kptr() -> (Vec<u8>, u32, u32, u32, u32) {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "task_struct");
    let n_x = push_name(&mut strings, "x");
    let n_kptr = push_name(&mut strings, "my_kptr");
    let n_bss = push_name(&mut strings, ".bss");
    let n_kfunc = push_name(&mut strings, "bpf_task_acquire");
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        // NOTE(review): the Var is typed as the u64 (id 1), not the
        // task_struct pointer (id 3) -- presumably the analyzer keys on
        // the 8-byte slot rather than the var's declared type; confirm.
        SynType::Var {
            name_off: n_kptr,
            type_id: 1,
            linkage: 1,
        },
        SynType::Datasec {
            name_off: n_bss,
            size: 8,
            entries: vec![SynVarSecinfo {
                type_id: 4,
                offset: 0,
                size: 8,
            }],
        },
        SynType::FuncProto {
            return_type_id: 3,
            params: vec![],
        },
        SynType::Func {
            name_off: n_kfunc,
            type_id: 6,
            linkage: 1,
        },
    ];
    let blob = build_btf(&types, &strings);
    // (blob, datasec_id, t_id, var_off, kfunc_id)
    (blob, 5, 2, 0, 7)
}
// Storing a kfunc-acquired T* through an annotated .bss pointer must be
// recorded as a kernel kptr at (datasec_id, var_off).
#[test]
fn bss_kptr_records_kernel_cast() {
    let (raw, datasec_id, t_id, var_off, kfunc_id) = btf_bss_with_kptr();
    let btf = Btf::from_bytes(&raw).unwrap();
    let [lo, hi] = ld_imm64(1, var_off as i32);
    // r0 = bpf_task_acquire(); r1 = &.bss[my_kptr]; *(r1+0) = r0.
    let prog = [kfunc_call(kfunc_id), lo, hi, stx(BPF_SIZE_DW, 1, 0, 0), exit()];
    let annots = [DatasecPointer {
        insn_offset: 1,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let map = analyze_casts(&prog, &btf, &[], &[], &annots, &[]);
    assert_eq!(
        map.get(&(datasec_id, var_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kfunc-returned T* stored into .bss[my_kptr] must record \
         (datasec_id, 0) -> (T, Kernel): {map:?}"
    );
}
// Without a DatasecPointer annotation the LD_IMM64 destination has no
// known section identity, so the store must record nothing.
#[test]
fn ld_imm64_without_annotation_no_record() {
    let (raw, _datasec_id, _t_id, var_off, kfunc_id) = btf_bss_with_kptr();
    let btf = Btf::from_bytes(&raw).unwrap();
    let [lo, hi] = ld_imm64(1, var_off as i32);
    let prog = [kfunc_call(kfunc_id), lo, hi, stx(BPF_SIZE_DW, 1, 0, 0), exit()];
    let map = analyze_casts(&prog, &btf, &[], &[], &[], &[]);
    assert!(
        map.is_empty(),
        "LD_IMM64 without DatasecPointer annotation must not record \
         a kptr finding: {map:?}"
    );
}
// The stored value is a plain scalar (r0 = 0), not a typed pointer, so
// the annotated .bss store must not record a kptr.
#[test]
fn bss_stx_with_untyped_value_no_record() {
    let (raw, datasec_id, _t_id, var_off, _kfunc_id) = btf_bss_with_kptr();
    let btf = Btf::from_bytes(&raw).unwrap();
    let [lo, hi] = ld_imm64(1, var_off as i32);
    let prog = [lo, hi, mov_k(0, 0), stx(BPF_SIZE_DW, 1, 0, 0), exit()];
    let annots = [DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let map = analyze_casts(&prog, &btf, &[], &[], &annots, &[]);
    assert!(
        map.is_empty(),
        "STX with untyped value register must not record kptr: {map:?}"
    );
}
// A .bss section with two kptr variables (offsets 0 and 16). Two
// separate kfunc-acquire/store sequences must produce two independent
// kernel findings, keyed by each variable's section offset.
#[test]
fn bss_multi_variable_layout() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "task_struct");
    let n_x = push_name(&mut strings, "x");
    let n_a = push_name(&mut strings, "kptr_a");
    let n_b = push_name(&mut strings, "kptr_b");
    let n_bss = push_name(&mut strings, ".bss");
    let n_kfunc = push_name(&mut strings, "bpf_task_acquire");
    // BTF ids: 1=u64, 2=task_struct, 3=task_struct*, 4=kptr_a,
    // 5=kptr_b, 6=.bss datasec, 7=proto, 8=func.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Var {
            name_off: n_a,
            type_id: 1,
            linkage: 1,
        },
        SynType::Var {
            name_off: n_b,
            type_id: 1,
            linkage: 1,
        },
        SynType::Datasec {
            name_off: n_bss,
            size: 24,
            entries: vec![
                SynVarSecinfo {
                    type_id: 4,
                    offset: 0,
                    size: 8,
                },
                SynVarSecinfo {
                    type_id: 5,
                    offset: 16,
                    size: 8,
                },
            ],
        },
        SynType::FuncProto {
            return_type_id: 3,
            params: vec![],
        },
        SynType::Func {
            name_off: n_kfunc,
            type_id: 7,
            linkage: 1,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let datasec_id = 6;
    let t_id = 2;
    let kfunc_id = 8;
    let [ld_a_lo, ld_a_hi] = ld_imm64(1, 0);
    let [ld_b_lo, ld_b_hi] = ld_imm64(2, 16);
    // Two acquire/store sequences, one per variable.
    let insns = vec![
        kfunc_call(kfunc_id),
        ld_a_lo,
        ld_a_hi,
        stx(BPF_SIZE_DW, 1, 0, 0),
        kfunc_call(kfunc_id),
        ld_b_lo,
        ld_b_hi,
        stx(BPF_SIZE_DW, 2, 0, 0),
        exit(),
    ];
    // Each LD_IMM64 start pc carries its own section annotation.
    let datasec_pointers = vec![
        DatasecPointer {
            insn_offset: 1,
            datasec_type_id: datasec_id,
            base_offset: 0,
        },
        DatasecPointer {
            insn_offset: 5,
            datasec_type_id: datasec_id,
            base_offset: 16,
        },
    ];
    let map = analyze_casts(&insns, &btf, &[], &[], &datasec_pointers, &[]);
    assert_eq!(
        map.get(&(datasec_id, 0)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kptr_a at offset 0: {map:?}"
    );
    assert_eq!(
        map.get(&(datasec_id, 16)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "kptr_b at offset 16: {map:?}"
    );
}
// struct_member_at on a DATASEC parent must resolve any byte inside a
// variable's range to that variable, and return None past the section.
#[test]
fn struct_member_at_datasec_resolves_variables() {
    let (raw, datasec_id, _t_id, _var_off, _kfunc_id) = btf_bss_with_kptr();
    let btf = Btf::from_bytes(&raw).unwrap();
    // Bytes 0 and 4 both fall inside my_kptr's 8-byte span.
    let probes = [
        (0, "byte 0 must hit my_kptr"),
        (4, "byte 4 must hit my_kptr range"),
    ];
    for (byte, why) in probes {
        match struct_member_at(&btf, datasec_id, byte).expect(why) {
            MemberAt::Datasec {
                var_byte_offset, ..
            } => assert_eq!(var_byte_offset, 0),
            MemberAt::Struct { .. } => panic!("Datasec parent must yield Datasec match"),
        }
    }
    assert!(
        struct_member_at(&btf, datasec_id, 100).is_none(),
        "byte 100 outside section must return None"
    );
}
// End-to-end: acquire via kfunc, materialize the annotated .bss address,
// store -- exactly one kernel finding must result.
#[test]
fn end_to_end_bss_global_stores_kfunc_pointer() {
    let (raw, datasec_id, t_id, var_off, kfunc_id) = btf_bss_with_kptr();
    let btf = Btf::from_bytes(&raw).unwrap();
    let [lo, hi] = ld_imm64(1, var_off as i32);
    let prog = [
        kfunc_call(kfunc_id),
        lo,
        hi,
        stx(BPF_SIZE_DW, 1, 0, 0),
        exit(),
    ];
    let annots = [DatasecPointer {
        insn_offset: 1,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let map = analyze_casts(&prog, &btf, &[], &[], &annots, &[]);
    assert_eq!(map.len(), 1, "exactly one finding expected: {map:?}");
    assert_eq!(
        map.get(&(datasec_id, var_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
    );
}
// A kfunc call whose BTF id is 0 cannot be resolved, so R0 must stay
// Unknown and the following store records nothing.
#[test]
fn kfunc_call_imm_zero_leaves_r0_unknown() {
    let slot_off: u32 = 16;
    let (raw, _t_id, p_id, _t_ptr_id) = btf_kptr_base(slot_off);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        kfunc_call(0),
        stx(BPF_SIZE_DW, 6, 0, slot_off as i16),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 6,
        struct_type_id: p_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "kfunc_call imm=0 must leave R0 Unknown: {map:?}"
    );
}
// A JMP32|JA ("gotol", 32-bit imm target) jump target must start from a
// reset register state, so the dereference after the jump sees nothing.
#[test]
fn jmp32_gotol_resets_state_at_target() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    // The gotol (imm = +1) skips the first exit.
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        mk_insn(BPF_CLASS_JMP32, 0, 0, 0, 1),
        exit(),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert!(map.is_empty(), "JMP32|JA target must reset state: {map:?}");
}
// Conditional jumps whose targets fall outside the program, in either
// direction, must simply be dropped; the fallthrough state survives.
#[test]
fn out_of_range_jump_targets_dropped() {
    let (raw, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        mk_insn(BPF_CLASS_JMP | 0x10, 2, 0, -100, 0),
        mk_insn(BPF_CLASS_JMP | 0x10, 2, 0, 100, 0),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "out-of-range jumps must drop, state survives: {map:?}"
    );
}
// Every conditional JMP opcode must register its target as a reset
// point: JEQ, JGT, JGE, JSET, JNE, JSGT, JSGE, JLT, JLE, JSLT, JSLE.
#[test]
fn all_conditional_jumps_register_targets() {
    let (raw, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let ops: [u8; 11] = [
        0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0xa0, 0xb0, 0xc0, 0xd0,
    ];
    for op in ops {
        // Branch over the first exit straight to the dereference.
        let prog = [
            ldx(BPF_SIZE_DW, 2, 1, 8),
            mk_insn(BPF_CLASS_JMP | op, 2, 0, 1, 0),
            exit(),
            ldx(BPF_SIZE_DW, 3, 2, 0),
            exit(),
        ];
        let seeds = [InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }];
        let map = analyze_casts(&prog, &btf, &seeds, &[], &[], &[]);
        assert!(
            map.is_empty(),
            "JMP op 0x{op:02x} target must reset state: {map:?}"
        );
    }
}
// Two FuncEntry annotations share pc 0 with opposite parameter orders:
// proto_a seeds (r1=T*, r2=P*), proto_b seeds (r1=P*, r2=T*). The store
// *(r1 + slot) = r2 only yields the expected (P, slot) -> T hit if the
// *later* entry (proto_b) provided the seeding.
#[test]
fn func_entry_multiple_at_same_pc_last_wins() {
    let slot_off: u32 = 16;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_p = push_name(&mut strings, "P");
    let n_x = push_name(&mut strings, "x");
    let n_slot = push_name(&mut strings, "slot");
    let n_arg_t = push_name(&mut strings, "arg_t");
    let n_arg_p = push_name(&mut strings, "arg_p");
    // BTF ids: 1=u64, 2=T, 3=T*, 4=P, 5=P*, 6=proto_a, 7=proto_b.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_p,
            size: slot_off + 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: slot_off,
            }],
        },
        SynType::Ptr { type_id: 4 },
        // proto_a: (arg_t: T*, arg_p: P*)
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![
                SynParam {
                    name_off: n_arg_t,
                    type_id: 3,
                },
                SynParam {
                    name_off: n_arg_p,
                    type_id: 5,
                },
            ],
        },
        // proto_b: (arg_p: P*, arg_t: T*) -- reversed order.
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![
                SynParam {
                    name_off: n_arg_p,
                    type_id: 5,
                },
                SynParam {
                    name_off: n_arg_t,
                    type_id: 3,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let p_id = 4;
    let proto_a = 6;
    let proto_b = 7;
    // *(r1 + slot) = r2 -- only sensible under proto_b's seeding.
    let insns = vec![stx(BPF_SIZE_DW, 1, 2, slot_off as i16), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[],
        &[
            FuncEntry {
                insn_offset: 0,
                func_proto_id: proto_a,
            },
            FuncEntry {
                insn_offset: 0,
                func_proto_id: proto_b,
            },
        ],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "later FuncEntry at same PC must win: {map:?}"
    );
}
// A FuncEntry pointing far past the end of the program must be ignored;
// the InitialReg seeding stays in effect and the arena hit is recorded.
#[test]
fn func_entry_past_insns_len_no_op() {
    let (raw, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&raw).unwrap();
    let prog = [
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let entries = [FuncEntry {
        insn_offset: 999,
        func_proto_id: 1,
    }];
    let map = analyze_casts(&prog, &btf, &seeds, &entries, &[], &[]);
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "FuncEntry past insns.len() must not affect run: {map:?}"
    );
}
// A FuncEntry at pc 0 whose prototype takes no parameters must wipe the
// InitialReg seeding: with r1 untyped, the load chain records nothing.
#[test]
fn func_entry_pc0_no_params_clears_initial_regs() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q = push_name(&mut strings, "Q");
    let n_f = push_name(&mut strings, "f");
    let n_x = push_name(&mut strings, "x");
    // BTF ids: 1=u64, 2=T, 3=Q, 4=param-less proto.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        SynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let proto_id = 4;
    let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[FuncEntry {
            insn_offset: 0,
            func_proto_id: proto_id,
        }],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "FuncEntry with empty params must clear all regs: {map:?}"
    );
}
/// `func_proto_id == 0` means "clear register state, seed nothing":
/// the analysis must drop the InitialReg typing and record no findings.
#[test]
fn func_entry_proto_id_zero_clears_regs_no_seed() {
    let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let program = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let seed = InitialReg {
        reg: 1,
        struct_type_id: t_id,
    };
    let clearing_entry = FuncEntry {
        insn_offset: 0,
        func_proto_id: 0,
    };
    let map = analyze_casts(&program, &btf, &[seed], &[clearing_entry], &[], &[]);
    assert!(
        map.is_empty(),
        "FuncEntry with proto_id=0 must clear regs and not seed: {map:?}"
    );
}
// A FuncEntry at a PC greater than 0 must reseed register state
// mid-stream from its prototype (r1 <- *T, r2 <- *P), so the STX at
// PC 1 records (P, slot_off) -> T as a kernel pointer.
#[test]
fn func_entry_pc_gt_0_reseeds_mid_stream() {
    let slot_off: u32 = 16;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_p = push_name(&mut strings, "P");
    let n_x = push_name(&mut strings, "x");
    let n_slot = push_name(&mut strings, "slot");
    let n_arg_t = push_name(&mut strings, "arg_t");
    let n_arg_p = push_name(&mut strings, "arg_p");
    // Type ids are positional: 1=u64, 2=T, 3=*T, 4=P, 5=*P, 6=proto(*T, *P).
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_p,
            size: slot_off + 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: slot_off,
            }],
        },
        SynType::Ptr { type_id: 4 },
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![
                SynParam {
                    name_off: n_arg_t,
                    type_id: 3,
                },
                SynParam {
                    name_off: n_arg_p,
                    type_id: 5,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 2;
    let p_id = 4;
    let proto_id = 6;
    // PC 0 is a plain exit; the interesting store sits at PC 1, which is
    // exactly where the FuncEntry reseeds r1/r2.
    let insns = vec![exit(), stx(BPF_SIZE_DW, 2, 1, slot_off as i16), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[],
        &[FuncEntry {
            insn_offset: 1,
            func_proto_id: proto_id,
        }],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(p_id, slot_off)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: t_id,
            addr_space: AddrSpace::Kernel,
        }),
        "FuncEntry at PC>0 must reseed: {map:?}"
    );
}
/// The second slot of an LD_IMM64 pair is normally an all-zero
/// pseudo-instruction. Here it carries live-looking bytes (a MOV-shaped
/// pattern); the decoder must still skip it rather than interpret it,
/// leaving the subsequent cast sequence intact.
#[test]
fn ld_imm64_second_slot_with_non_zero_content_skipped() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    // LD_IMM64 low half, then a second slot that *looks* like a MOV.
    let lo = mk_insn(BPF_CLASS_LD | BPF_SIZE_DW | BPF_MODE_IMM, 6, 0, 0, 42);
    let bogus_hi = mk_insn(BPF_CLASS_ALU64 | BPF_OP_MOV | BPF_SRC_X, 4, 3, 0, 0);
    let program = vec![
        lo,
        bogus_hi,
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&program, &btf, &seeds, &[], &[], &[]);
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "non-zero LD_IMM64 second slot must skip: {map:?}"
    );
}
// Two InitialReg entries for the same register: the later one (S2) must
// overwrite the earlier one (S1), so the cast resolves against S2's
// member layout (f @ 16) and no (S1, 16) entry appears.
#[test]
fn initial_reg_duplicate_seeds_last_wins() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_s1 = push_name(&mut strings, "S1");
    let n_s2 = push_name(&mut strings, "S2");
    let n_q = push_name(&mut strings, "Q");
    let n_f = push_name(&mut strings, "f");
    let n_x = push_name(&mut strings, "x");
    // Type ids are positional: 1=u64, 2=S1 (f@8), 3=S2 (f@16), 4=Q.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_s1,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        SynType::Struct {
            name_off: n_s2,
            size: 24,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 16,
            }],
        },
        SynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let s1_id = 2;
    let s2_id = 3;
    let q_id = 4;
    // The load at offset 16 only lines up with S2's member.
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 16),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 1,
                struct_type_id: s1_id,
            },
            InitialReg {
                reg: 1,
                struct_type_id: s2_id,
            },
        ],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(s2_id, 16)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "duplicate InitialReg seed must use last value: {map:?}"
    );
    assert!(
        !map.contains_key(&(s1_id, 16)),
        "first InitialReg seed must NOT take effect: {map:?}"
    );
}
/// An `InitialReg` carrying `struct_type_id == 0` (void) conveys no type
/// information and must be discarded rather than seeding register state.
#[test]
fn initial_reg_struct_type_id_zero_dropped() {
    let (blob, _t, _q) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let program = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let void_seed = InitialReg {
        reg: 1,
        struct_type_id: 0,
    };
    let map = analyze_casts(&program, &btf, &[void_seed], &[], &[], &[]);
    assert!(
        map.is_empty(),
        "InitialReg with struct_type_id=0 must be dropped: {map:?}"
    );
}
/// A single cast pattern buried in the middle of a 10_000-instruction
/// program must still be found — and must be the only finding.
#[test]
fn large_program_buried_cast_recorded() {
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    // 4_999 no-op movs, the 3-insn cast pattern, 4_997 more movs, exit.
    let mut program: Vec<BpfInsn> = Vec::with_capacity(10_001);
    program.extend((0..4_999).map(|_| mov_k(0, 0)));
    program.extend([
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
    ]);
    program.extend((0..4_997).map(|_| mov_k(0, 0)));
    program.push(exit());
    assert_eq!(program.len(), 10_000);
    let seeds = [InitialReg {
        reg: 1,
        struct_type_id: t_id,
    }];
    let map = analyze_casts(&program, &btf, &seeds, &[], &[], &[]);
    assert_eq!(
        map.len(),
        1,
        "exactly one cast in 10k-insn program: {map:?}"
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "buried cast must resolve: {map:?}"
    );
}
// One FuncEntry per PC: each of the N prototypes seeds r1 with a
// different *T{i} and r2 with *P, so the STX at PC i records
// (P, 8*i) -> T{i} as a kernel pointer. Exercises repeated reseeding.
#[test]
fn many_func_entries_each_seeds() {
    const N: usize = 100;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_p = push_name(&mut strings, "P");
    let n_arg_t = push_name(&mut strings, "task");
    let n_arg_p = push_name(&mut strings, "parent");
    let mut t_name_offs = Vec::with_capacity(N);
    for i in 0..N {
        t_name_offs.push(push_name(&mut strings, &format!("T{i}")));
    }
    let mut slot_name_offs = Vec::with_capacity(N);
    for i in 0..N {
        slot_name_offs.push(push_name(&mut strings, &format!("slot{i}")));
    }
    // Type-id layout (positional): 1=u64, 2..=N+1 the T{i} structs,
    // N+2..=2N+1 the *T{i} pointers, 2N+2=P, 2N+3=*P,
    // 2N+4..=3N+3 the protos fn(*T{i}, *P).
    let mut types: Vec<SynType> = Vec::new();
    types.push(SynType::Int {
        name_off: n_u64,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    });
    for &name_off in t_name_offs.iter().take(N) {
        types.push(SynType::Struct {
            name_off,
            size: 8,
            members: vec![SynMember {
                name_off: 0,
                type_id: 1,
                byte_offset: 0,
            }],
        });
    }
    for i in 0..N {
        types.push(SynType::Ptr {
            type_id: (2 + i) as u32,
        });
    }
    let p_size: u32 = 8 * (N as u32);
    let p_members: Vec<SynMember> = (0..N)
        .map(|i| SynMember {
            name_off: slot_name_offs[i],
            type_id: 1,
            byte_offset: 8 * i as u32,
        })
        .collect();
    types.push(SynType::Struct {
        name_off: n_p,
        size: p_size,
        members: p_members,
    });
    let p_id: u32 = 2 + 2 * N as u32;
    types.push(SynType::Ptr { type_id: p_id });
    let p_ptr_id: u32 = 2 * N as u32 + 3;
    for i in 0..N {
        types.push(SynType::FuncProto {
            return_type_id: 0,
            params: vec![
                SynParam {
                    name_off: n_arg_t,
                    type_id: (N as u32 + 2 + i as u32),
                },
                SynParam {
                    name_off: n_arg_p,
                    type_id: p_ptr_id,
                },
            ],
        });
    }
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let mut insns: Vec<BpfInsn> = Vec::with_capacity(N + 1);
    let mut func_entries: Vec<FuncEntry> = Vec::with_capacity(N);
    for i in 0..N {
        // Store r2 (*T{i} after the reseed at PC i) into P's i-th slot.
        insns.push(stx(BPF_SIZE_DW, 2, 1, (8 * i) as i16));
        let proto_id: u32 = 2 * N as u32 + 4 + i as u32;
        func_entries.push(FuncEntry {
            insn_offset: i,
            func_proto_id: proto_id,
        });
    }
    insns.push(exit());
    let map = analyze_casts(&insns, &btf, &[], &func_entries, &[], &[]);
    assert_eq!(map.len(), N, "expected {N} kptr findings: {map:?}");
    for i in 0..N {
        let t_id = (2 + i) as u32;
        assert_eq!(
            map.get(&(p_id, 8 * i as u32)),
            Some(&CastHit {
                alloc_size: None,
                target_type_id: t_id,
                addr_space: AddrSpace::Kernel,
            }),
            "FuncEntry #{i} at PC {i} must record (P, {}) -> T{i}: {map:?}",
            8 * i as u32
        );
    }
}
// With 500 candidate structs in BTF, the observed access shape after the
// cast (u64 @ 40 and u32 @ 80) must match exactly one of them (Qtarget),
// not any of the 499 small filler structs.
#[test]
fn many_struct_types_unique_match_resolves() {
    const N_FILLER: usize = 499;
    let mut strings: Vec<u8> = vec![0];
    let n_u32 = push_name(&mut strings, "u32");
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_qtarget = push_name(&mut strings, "Qtarget");
    let n_filler_a = push_name(&mut strings, "a");
    let n_filler_b = push_name(&mut strings, "b");
    let n_f = push_name(&mut strings, "f");
    let mut filler_name_offs = Vec::with_capacity(N_FILLER);
    for i in 0..N_FILLER {
        filler_name_offs.push(push_name(&mut strings, &format!("Q{i}")));
    }
    // Type ids are positional: 1=u32, 2=u64, 3=T, 4=Qtarget, 5.. fillers.
    let mut types: Vec<SynType> = vec![
        SynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 2,
                byte_offset: 8,
            }],
        },
        // The only struct whose layout fits the access pattern below.
        SynType::Struct {
            name_off: n_qtarget,
            size: 84,
            members: vec![
                SynMember {
                    name_off: n_filler_a,
                    type_id: 2,
                    byte_offset: 40,
                },
                SynMember {
                    name_off: n_filler_b,
                    type_id: 1,
                    byte_offset: 80,
                },
            ],
        },
    ];
    // 499 decoys: 8-byte structs with a single u64 at offset 0.
    for &name_off in filler_name_offs.iter().take(N_FILLER) {
        types.push(SynType::Struct {
            name_off,
            size: 8,
            members: vec![SynMember {
                name_off: n_filler_a,
                type_id: 2,
                byte_offset: 0,
            }],
        });
    }
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id: u32 = 3;
    let qtarget_id: u32 = 4;
    // After the cast: a DW load at 40 and a W load at 80 — Qtarget's shape.
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 40),
        ldx(BPF_SIZE_W, 4, 2, 80),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.len(),
        1,
        "single unique cast across 500 candidates: {map:?}"
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: qtarget_id,
            addr_space: AddrSpace::Arena,
        }),
        "unique match must resolve to Qtarget: {map:?}"
    );
}
// T's member `f` reaches u64 only through a 30-deep chain of alternating
// typedef / const / volatile wrappers. Peeling the whole chain must
// succeed so the cast pattern still resolves to Q.
#[test]
fn deep_modifier_chain_resolves_to_u64() {
    const CHAIN_LEN: usize = 30;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q = push_name(&mut strings, "Q");
    let n_f = push_name(&mut strings, "f");
    let n_x = push_name(&mut strings, "x");
    let n_typedef = push_name(&mut strings, "alias_t");
    let mut types: Vec<SynType> = Vec::new();
    // id 1 = u64; ids 2..=CHAIN_LEN+1 are the modifier chain, each node
    // wrapping the previous one (inner_id = 1 + i).
    types.push(SynType::Int {
        name_off: n_u64,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    });
    for i in 0..CHAIN_LEN {
        let inner_id = 1 + i as u32;
        // Cycle through the three modifier kinds.
        let kind = i % 3;
        let chain_node = match kind {
            0 => SynType::Typedef {
                name_off: n_typedef,
                type_id: inner_id,
            },
            1 => SynType::Const { type_id: inner_id },
            _ => SynType::Volatile { type_id: inner_id },
        };
        types.push(chain_node);
    }
    let chain_head_id: u32 = (CHAIN_LEN as u32) + 1;
    // T.f is typed as the *outermost* chain node, not u64 directly.
    types.push(SynType::Struct {
        name_off: n_t,
        size: 16,
        members: vec![SynMember {
            name_off: n_f,
            type_id: chain_head_id,
            byte_offset: 8,
        }],
    });
    types.push(SynType::Struct {
        name_off: n_q,
        size: 8,
        members: vec![SynMember {
            name_off: n_x,
            type_id: 1,
            byte_offset: 0,
        }],
    });
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id: u32 = chain_head_id + 1;
    let q_id: u32 = chain_head_id + 2;
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "30-level modifier chain must peel to u64 and seed cast: {map:?}"
    );
}
// 64 acquire-kfunc results are spilled to 64 distinct stack slots, then
// each is reloaded and stored into a distinct slot of struct P — every
// one must be tracked through the spill/reload and recorded.
#[test]
fn maximum_stack_slots_all_recorded() {
    const N: usize = 64;
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_p = push_name(&mut strings, "P");
    let mut t_names = Vec::with_capacity(N);
    let mut slot_names = Vec::with_capacity(N);
    let mut kfunc_names = Vec::with_capacity(N);
    for i in 0..N {
        t_names.push(push_name(&mut strings, &format!("T{i}")));
        slot_names.push(push_name(&mut strings, &format!("slot{i}")));
        kfunc_names.push(push_name(&mut strings, &format!("kfunc_acquire_{i}")));
    }
    // Type-id layout (positional): 1=u64, 2..=N+1 the T{i} structs,
    // N+2..=2N+1 the *T{i} pointers, 2N+2=P,
    // 2N+3..=3N+2 protos () -> *T{i}, 3N+3..=4N+2 the kfuncs.
    let mut types: Vec<SynType> = Vec::new();
    types.push(SynType::Int {
        name_off: n_u64,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    });
    for &name_off in t_names.iter().take(N) {
        types.push(SynType::Struct {
            name_off,
            size: 8,
            members: vec![SynMember {
                name_off: 0,
                type_id: 1,
                byte_offset: 0,
            }],
        });
    }
    for i in 0..N {
        types.push(SynType::Ptr {
            type_id: (2 + i) as u32,
        });
    }
    let p_members: Vec<SynMember> = (0..N)
        .map(|i| SynMember {
            name_off: slot_names[i],
            type_id: 1,
            byte_offset: 8 * i as u32,
        })
        .collect();
    types.push(SynType::Struct {
        name_off: n_p,
        size: 8 * N as u32,
        members: p_members,
    });
    for i in 0..N {
        // Each proto returns *T{i} (the pointer at id N+2+i).
        types.push(SynType::FuncProto {
            return_type_id: (N as u32 + 2 + i as u32),
            params: vec![],
        });
    }
    for (i, &name_off) in kfunc_names.iter().enumerate().take(N) {
        types.push(SynType::Func {
            name_off,
            type_id: (2 * N as u32 + 3 + i as u32),
            linkage: 1,
        });
    }
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let p_id: u32 = 2 * N as u32 + 2;
    let mut insns: Vec<BpfInsn> = Vec::with_capacity(4 * N + 1);
    // Phase 1: call kfunc i, spill r0 into fp[-(i+1)*8].
    for i in 0..N {
        let func_id: u32 = 3 * N as u32 + 3 + i as u32;
        insns.push(kfunc_call(func_id));
        insns.push(stx(BPF_SIZE_DW, 10, 0, -((i as i16 + 1) * 8)));
    }
    // Phase 2: reload each slot into r3 and store into P's i-th slot (r6).
    for i in 0..N {
        insns.push(ldx(BPF_SIZE_DW, 3, 10, -((i as i16 + 1) * 8)));
        insns.push(stx(BPF_SIZE_DW, 6, 3, (8 * i) as i16));
    }
    insns.push(exit());
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(map.len(), N, "expected {N} kptr findings: {map:?}");
    for i in 0..N {
        let t_id: u32 = (2 + i) as u32;
        assert_eq!(
            map.get(&(p_id, 8 * i as u32)),
            Some(&CastHit {
                alloc_size: None,
                target_type_id: t_id,
                addr_space: AddrSpace::Kernel,
            }),
            "stack slot {i} (off={}) must record T{i}: {map:?}",
            -((i as i16 + 1) * 8)
        );
    }
}
// T has 100 u64 fields at offsets 8*i. Two different fields (f50 @ 400,
// f99 @ 792) are cast into arena space with different access shapes, so
// they must resolve to two different target structs (Q50 vs Q99).
#[test]
fn many_field_struct_records_two_distinct_casts() {
    const N: u32 = 100;
    let mut strings: Vec<u8> = vec![0];
    let n_u8 = push_name(&mut strings, "u8");
    let n_u32 = push_name(&mut strings, "u32");
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q50 = push_name(&mut strings, "Q50");
    let n_q99 = push_name(&mut strings, "Q99");
    let n_x = push_name(&mut strings, "x");
    let mut t_field_names = Vec::with_capacity(N as usize);
    for i in 0..N {
        t_field_names.push(push_name(&mut strings, &format!("f{i}")));
    }
    let t_members: Vec<SynMember> = (0..N)
        .map(|i| SynMember {
            name_off: t_field_names[i as usize],
            type_id: 3,
            byte_offset: 8 * i,
        })
        .collect();
    // Type ids are positional: 1=u8, 2=u32, 3=u64, 4=T, 5=Q50, 6=Q99.
    let types = vec![
        SynType::Int {
            name_off: n_u8,
            size: 1,
            encoding: 0,
            offset: 0,
            bits: 8,
        },
        SynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8 * N,
            members: t_members,
        },
        // Q50: u32 at offset 4 — matched by the W load below.
        SynType::Struct {
            name_off: n_q50,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 2,
                byte_offset: 4,
            }],
        },
        // Q99: u8 at offset 5 — matched by the B load below.
        SynType::Struct {
            name_off: n_q99,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 5,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id: u32 = 4;
    let q50_id: u32 = 5;
    let q99_id: u32 = 6;
    // 400 = 8*50 (f50), 792 = 8*99 (f99).
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 400),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_W, 3, 2, 4),
        ldx(BPF_SIZE_DW, 2, 1, 792),
        addr_space_cast(2, 2, 1),
        ldx(BPF_SIZE_B, 4, 2, 5),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(map.len(), 2, "two distinct casts expected: {map:?}");
    assert_eq!(
        map.get(&(t_id, 400)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q50_id,
            addr_space: AddrSpace::Arena,
        }),
        "f50 at offset 400: {map:?}"
    );
    assert_eq!(
        map.get(&(t_id, 792)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q99_id,
            addr_space: AddrSpace::Arena,
        }),
        "f99 at offset 792: {map:?}"
    );
}
// Twenty back-to-back cast patterns in one program: each Q{i} has a
// unique size (i+2) and a u8 member at i+1, so the B load at offset i+1
// after each cast selects exactly Q{i}.
#[test]
fn many_cast_patterns_in_one_program() {
    const N: u32 = 20;
    let mut strings: Vec<u8> = vec![0];
    let n_u8 = push_name(&mut strings, "u8");
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_x = push_name(&mut strings, "x");
    let mut t_field_names = Vec::with_capacity(N as usize);
    let mut q_names = Vec::with_capacity(N as usize);
    for i in 0..N {
        t_field_names.push(push_name(&mut strings, &format!("f{i}")));
        q_names.push(push_name(&mut strings, &format!("Q{i}")));
    }
    let t_members: Vec<SynMember> = (0..N)
        .map(|i| SynMember {
            name_off: t_field_names[i as usize],
            type_id: 2,
            byte_offset: 8 * i,
        })
        .collect();
    // Type ids are positional: 1=u8, 2=u64, 3=T, 4..=N+3 the Q{i}.
    let mut types: Vec<SynType> = vec![
        SynType::Int {
            name_off: n_u8,
            size: 1,
            encoding: 0,
            offset: 0,
            bits: 8,
        },
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8 * N,
            members: t_members,
        },
    ];
    for i in 0..N {
        types.push(SynType::Struct {
            name_off: q_names[i as usize],
            size: i + 2,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: i + 1,
            }],
        });
    }
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id: u32 = 3;
    let mut insns: Vec<BpfInsn> = Vec::with_capacity(3 * N as usize + 1);
    // Per pattern: load T.f{i}, cast, then a B access at Q{i}'s member.
    for i in 0..N {
        insns.push(ldx(BPF_SIZE_DW, 2, 1, (8 * i) as i16));
        insns.push(addr_space_cast(2, 2, 1));
        insns.push(ldx(BPF_SIZE_B, 3, 2, (i + 1) as i16));
    }
    insns.push(exit());
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(map.len(), N as usize, "expected {N} cast patterns: {map:?}");
    for i in 0..N {
        let q_id: u32 = 4 + i;
        assert_eq!(
            map.get(&(t_id, 8 * i)),
            Some(&CastHit {
                alloc_size: None,
                target_type_id: q_id,
                addr_space: AddrSpace::Arena,
            }),
            "pattern #{i} at (T, {}) must resolve to Q{i}: {map:?}",
            8 * i
        );
    }
}
/// BTF that contains no struct types at all (a lone integer) must not
/// panic the analysis; it simply produces no findings.
#[test]
fn empty_btf_no_panic() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let only_int = SynType::Int {
        name_off: n_u64,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    };
    let blob = build_btf(&[only_int], &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let program = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    // Seed r1 with id 1, which is the int — not a struct.
    let seed = InitialReg {
        reg: 1,
        struct_type_id: 1,
    };
    let map = analyze_casts(&program, &btf, &[seed], &[], &[], &[]);
    assert!(
        map.is_empty(),
        "no struct types in BTF must produce empty CastMap: {map:?}"
    );
}
// BTF consisting solely of integer types (no structs): seeding r1 with
// an int id must not panic and must yield no findings.
#[test]
fn btf_only_ints_no_panic() {
    let mut strings: Vec<u8> = vec![0];
    let n_u8 = push_name(&mut strings, "u8");
    let n_u16 = push_name(&mut strings, "u16");
    let n_u32 = push_name(&mut strings, "u32");
    let n_u64 = push_name(&mut strings, "u64");
    let n_s32 = push_name(&mut strings, "s32");
    const BTF_INT_SIGNED: u32 = 1;
    // Type ids are positional: 1=u8, 2=u16, 3=u32, 4=u64, 5=s32.
    let types = vec![
        SynType::Int {
            name_off: n_u8,
            size: 1,
            encoding: 0,
            offset: 0,
            bits: 8,
        },
        SynType::Int {
            name_off: n_u16,
            size: 2,
            encoding: 0,
            offset: 0,
            bits: 16,
        },
        SynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Int {
            name_off: n_s32,
            size: 4,
            encoding: BTF_INT_SIGNED,
            offset: 0,
            bits: 32,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![ldx(BPF_SIZE_DW, 2, 1, 8), ldx(BPF_SIZE_DW, 3, 2, 0), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        // Id 4 is the u64 int, not a struct.
        &[InitialReg {
            reg: 1,
            struct_type_id: 4,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "Int-only BTF must produce empty CastMap: {map:?}"
    );
}
/// A BPF-to-BPF (pseudo) call flagged as an alloc-returning subprog puts
/// an arena allocation in R0; storing R0 into M.cgx_raw must record an
/// Arena finding with an unresolved target (target_type_id = 0).
#[test]
fn stx_flow_alloc_return_records_arena_finding() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_m = push_name(&mut strings, "M");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    // Type ids: 1=u64, 2=M { cgx_raw: u64 @ 8 }.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_m,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 8,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let m_id = 2;
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    let program = vec![pseudo_call, stx(BPF_SIZE_DW, 6, 0, 8), exit()];
    let seed = InitialReg {
        reg: 6,
        struct_type_id: m_id,
    };
    let alloc_returns = [SubprogReturn {
        alloc_size: None,
        insn_offset: 0,
    }];
    let map = analyze_casts(&program, &btf, &[seed], &[], &[], &alloc_returns);
    assert_eq!(
        map.get(&(m_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "STX-flow alloc-return must record Arena finding with \
        target_type_id=0: {map:?}"
    );
}
// The alloc-return marker on R0 must survive a register-to-register MOV
// (r7 = r0): storing r7 into M.cgx_raw still records an Arena finding.
#[test]
fn stx_flow_alloc_return_propagates_through_mov() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_m = push_name(&mut strings, "M");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    // Type ids: 1=u64, 2=M { cgx_raw: u64 @ 8 }.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_m,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 8,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let m_id = 2;
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    // call; r7 = r0; *(r6 + 8) = r7.
    let insns = vec![pseudo_call, mov_x(7, 0), stx(BPF_SIZE_DW, 6, 7, 8), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: m_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    assert_eq!(
        map.get(&(m_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "MOV must propagate ArenaU64FromAlloc through r7: {map:?}"
    );
}
// The alloc-return marker must survive a stack spill/reload: R0 is
// spilled to fp[-8], reloaded into r7, and only then stored into
// M.cgx_raw — the Arena finding must still be recorded.
#[test]
fn stx_flow_alloc_return_round_trips_through_stack() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_m = push_name(&mut strings, "M");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    // Type ids: 1=u64, 2=M { cgx_raw: u64 @ 8 }.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_m,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 8,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let m_id = 2;
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    // call; spill r0 to fp[-8]; reload into r7; *(r6 + 8) = r7.
    let insns = vec![
        pseudo_call,
        stx(BPF_SIZE_DW, 10, 0, -8),
        ldx(BPF_SIZE_DW, 7, 10, -8),
        stx(BPF_SIZE_DW, 6, 7, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: m_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    assert_eq!(
        map.get(&(m_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "Stack spill/reload must round-trip ArenaU64FromAlloc: {map:?}"
    );
}
// Alias tracking through a struct slot: the alloc-return is stored into
// (M, 0), then loaded back out of that same slot into r7 and stored into
// (M, 8). Both slots must end up with Arena findings.
#[test]
fn stx_flow_alias_tracking_propagates_via_ldx() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_m = push_name(&mut strings, "M");
    let n_src = push_name(&mut strings, "src_slot");
    let n_dst = push_name(&mut strings, "dst_slot");
    // Type ids: 1=u64, 2=M { src_slot @ 0, dst_slot @ 8 }.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_m,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_src,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_dst,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let m_id = 2;
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    // call; *(r6+0) = r0; r7 = *(r6+0); *(r6+8) = r7.
    let insns = vec![
        pseudo_call,
        stx(BPF_SIZE_DW, 6, 0, 0),
        ldx(BPF_SIZE_DW, 7, 6, 0),
        stx(BPF_SIZE_DW, 6, 7, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: m_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    assert_eq!(
        map.get(&(m_id, 0)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "first STX must record (M, 0) -> Arena: {map:?}"
    );
    assert_eq!(
        map.get(&(m_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "alias-tracked LDX from (M, 0) must propagate to (M, 8) STX: {map:?}"
    );
}
// The same slot (M, 0) first receives an arena alloc-return (r0) and
// then a typed pointer (r7, seeded as pointing at T). The conflicting
// address spaces must remove the slot from the output entirely.
#[test]
fn stx_flow_conflict_with_kptr_drops_both() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_m = push_name(&mut strings, "M");
    let n_x = push_name(&mut strings, "x");
    let n_slot = push_name(&mut strings, "slot");
    // Type ids: 1=u64, 2=T, 3=*T, 4=M { slot: u64 @ 0 }.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        SynType::Ptr { type_id: 2 },
        SynType::Struct {
            name_off: n_m,
            size: 8,
            members: vec![SynMember {
                name_off: n_slot,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let m_id = 4;
    let t_id = 2;
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    // call; *(r6+0) = r0 (arena); *(r6+0) = r7 (kptr) — same slot.
    let insns = vec![
        pseudo_call,
        stx(BPF_SIZE_DW, 6, 0, 0),
        stx(BPF_SIZE_DW, 6, 7, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[
            InitialReg {
                reg: 6,
                struct_type_id: m_id,
            },
            InitialReg {
                reg: 7,
                struct_type_id: t_id,
            },
        ],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    assert!(
        !map.contains_key(&(m_id, 0)),
        "arena/kptr conflict must drop the slot from output: {map:?}"
    );
}
// The loads through r2 (= *(r1+8)) record the access shape u64@0 +
// u64@8 before the alloc-return at PC 4 claims r0. The final STX stores
// r0 into (P, 8); per the assertion, shape inference then resolves the
// target to Q — the only 16-byte struct with u64 members at 0 and 8.
#[test]
fn stx_flow_resolves_target_via_shape_inference_under_alias_tracking() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_p = push_name(&mut strings, "P");
    let n_q = push_name(&mut strings, "Q");
    let n_f = push_name(&mut strings, "f");
    let n_a = push_name(&mut strings, "a");
    let n_b = push_name(&mut strings, "b");
    // Type ids: 1=u64, 2=P { f @ 8 }, 3=Q { a @ 0, b @ 8 }.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_p,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        SynType::Struct {
            name_off: n_q,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let p_id = 2;
    let q_id = 3;
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    // r2 = *(r1+8); probe u64@0 and u64@8; r6 = r1; call; *(r6+8) = r0.
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        ldx(BPF_SIZE_DW, 4, 2, 8),
        mov_x(6, 1),
        pseudo_call,
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 4,
        }],
    );
    assert_eq!(
        map.get(&(p_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "STX-flow gates emission past F1; shape inference resolves \
        target_type_id from the recorded access pattern (Q is the \
        only struct of size 16 with u64@0 and u64@8): {map:?}"
    );
}
/// Builds BTF holding struct `M { cgx_raw: u64 @ 8 }` plus a kfunc named
/// `func_name` whose prototype returns an untyped pointer (`Ptr -> void`).
/// Returns `(blob, m_id, kfunc_id)`; ids follow declaration order:
/// 1=u64, 2=M, 3=Ptr(void), 4=proto, 5=func.
fn btf_with_arena_alloc_kfunc(func_name: &str) -> (Vec<u8>, u32, u32) {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_m = push_name(&mut strings, "M");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    let n_func = push_name(&mut strings, func_name);
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_m,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // type_id 0 is void, so this pointer is untyped.
        SynType::Ptr { type_id: 0 },
        SynType::FuncProto {
            return_type_id: 3,
            params: vec![],
        },
        SynType::Func {
            name_off: n_func,
            type_id: 4,
            linkage: 2,
        },
    ];
    let m_id = 2;
    let kfunc_id = 5;
    (build_btf(&types, &strings), m_id, kfunc_id)
}
/// An allowlisted arena-allocator kfunc (`bpf_arena_alloc_pages`) whose
/// prototype returns `Ptr -> void` seeds R0 as an arena allocation; the
/// following STX must therefore record an Arena finding.
#[test]
fn kfunc_arena_alloc_allowlist_records_arena_finding() {
    let (blob, m_id, kfunc_id) = btf_with_arena_alloc_kfunc("bpf_arena_alloc_pages");
    let btf = Btf::from_bytes(&blob).unwrap();
    let call = mk_insn(
        BPF_CLASS_JMP | BPF_OP_CALL,
        0,
        BPF_PSEUDO_KFUNC_CALL,
        0,
        kfunc_id as i32,
    );
    let program = vec![call, stx(BPF_SIZE_DW, 6, 0, 8), exit()];
    let seed = InitialReg {
        reg: 6,
        struct_type_id: m_id,
    };
    let map = analyze_casts(&program, &btf, &[seed], &[], &[], &[]);
    assert_eq!(
        map.get(&(m_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "allowlisted kfunc with `Ptr -> Void` return must seed R0 \
        as ArenaU64FromAlloc; subsequent STX must record an Arena \
        finding: {map:?}"
    );
}
// A kfunc named exactly like the allowlisted arena allocator, but whose
// prototype returns a *typed* pointer (Ptr -> struct R), must take the
// typed-pointer arm: the STX records a Kernel finding targeting R
// (id 3), not a false-positive Arena finding.
#[test]
fn kfunc_arena_alloc_typed_return_falls_through() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_m = push_name(&mut strings, "M");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    let n_r = push_name(&mut strings, "R");
    let n_x = push_name(&mut strings, "x");
    let n_func = push_name(&mut strings, "bpf_arena_alloc_pages");
    // Type ids: 1=u64, 2=M, 3=R, 4=*R, 5=proto()->*R, 6=func.
    let types = vec![
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        SynType::Struct {
            name_off: n_m,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        SynType::Struct {
            name_off: n_r,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // Unlike the allowlist fixture, this pointer is typed (-> R).
        SynType::Ptr { type_id: 3 },
        SynType::FuncProto {
            return_type_id: 4,
            params: vec![],
        },
        SynType::Func {
            name_off: n_func,
            type_id: 5,
            linkage: 2,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let m_id = 2;
    let kfunc_id = 6;
    let kfunc_call = mk_insn(
        BPF_CLASS_JMP | BPF_OP_CALL,
        0,
        BPF_PSEUDO_KFUNC_CALL,
        0,
        kfunc_id,
    );
    let insns = vec![kfunc_call, stx(BPF_SIZE_DW, 6, 0, 8), exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: m_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(m_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 3,
            addr_space: AddrSpace::Kernel,
        }),
        "kfunc whose return is typed (Ptr -> Struct) must take the \
        typed-pointer arm, NOT the arena allocator arm; the \
        allowlist arm must not produce a false-positive Arena \
        finding: {map:?}"
    );
}
/// A kfunc whose return shape matches the arena allocator (`Ptr -> void`)
/// but whose name is not on the allowlist must produce no findings.
#[test]
fn kfunc_arena_alloc_non_allowlist_name_drops() {
    let (blob, m_id, kfunc_id) = btf_with_arena_alloc_kfunc("ktstr_unlisted_kfunc");
    let btf = Btf::from_bytes(&blob).unwrap();
    let call = mk_insn(
        BPF_CLASS_JMP | BPF_OP_CALL,
        0,
        BPF_PSEUDO_KFUNC_CALL,
        0,
        kfunc_id as i32,
    );
    let program = vec![call, stx(BPF_SIZE_DW, 6, 0, 8), exit()];
    let seed = InitialReg {
        reg: 6,
        struct_type_id: m_id,
    };
    let map = analyze_casts(&program, &btf, &[seed], &[], &[], &[]);
    assert!(
        map.is_empty(),
        "kfunc with `Ptr -> Void` return but non-allowlist name must \
        NOT seed an arena finding: {map:?}"
    );
}
/// Encodes a call to BPF helper number `helper_id`: a plain `CALL` with
/// src_reg 0 (i.e. neither a pseudo call nor a kfunc call).
fn helper_call(helper_id: i32) -> BpfInsn {
    let opcode = BPF_CLASS_JMP | BPF_OP_CALL;
    mk_insn(opcode, 0, 0, 0, helper_id)
}
// Shape of the map value type that `btf_with_maps_and_task_ctx` encodes
// into the test BTF (presumably mirroring a `__type(value, ...)`
// declaration — see that helper for the exact encoding).
enum MapValueShape {
    // Value is a named struct, so lookup results can be typed.
    Struct,
    // Value is a bare u64 (stat counter); no struct to type R0 with.
    U64,
    // Value reaches its type through a typedef.
    Typedef,
    // Value type is void (`Ptr -> Void`).
    #[allow(dead_code)]
    Void,
}
// A map_lookup_elem on a map whose value is a named struct types R0 as a
// pointer to that struct; storing R0 into task_ctx @ 8 must record a
// Kernel finding targeting the value struct.
#[test]
fn helper_map_lookup_elem_typed_value_seeds_r0() {
    let (blob, datasec_id, var_off, value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::Struct);
    let btf = Btf::from_bytes(&blob).unwrap();
    // LD_IMM64 r1 <- map address, resolved via the DatasecPointer below.
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let mov_key = mov_k(2, 0);
    let call_lookup = helper_call(BPF_FUNC_MAP_LOOKUP_ELEM);
    let stx_kptr = stx(BPF_SIZE_DW, 6, 0, 8);
    let insns = vec![ld_lo, ld_hi, mov_key, call_lookup, stx_kptr, exit()];
    let datasec_pointers = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: parent_id,
        }],
        &[],
        &datasec_pointers,
        &[],
    );
    assert_eq!(
        map.get(&(parent_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: value_sid,
            addr_space: AddrSpace::Kernel,
        }),
        "lookup-derived R0 stored into task_ctx.cgx_raw must record \
        (task_ctx, 8) -> (cbw_cgrp_entry, Kernel): {map:?}"
    );
}
#[test]
fn helper_map_lookup_elem_value_type_unresolvable_keeps_r0_unknown() {
    // Map value resolves to `Ptr -> u64` (not a struct): R0 must stay
    // Unknown, so the STX records nothing.
    let (blob, datasec_id, var_off, _value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::U64);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let insns = vec![
        ld_lo,
        ld_hi,
        mov_k(2, 0),
        helper_call(BPF_FUNC_MAP_LOOKUP_ELEM),
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let datasecs = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let seed = [InitialReg {
        reg: 6,
        struct_type_id: parent_id,
    }];
    let map = analyze_casts(&insns, &btf, &seed, &[], &datasecs, &[]);
    assert!(
        map.is_empty(),
        "stat-counter map (`__type(value, u64)`) must keep R0 Unknown \
         so the STX records nothing: {map:?}"
    );
}
#[test]
fn helper_map_lookup_elem_value_type_void_keeps_r0_unknown() {
    // Map value is `Ptr -> Void` (type id 0): nothing to resolve, so
    // R0 stays Unknown.
    let (blob, datasec_id, var_off, _value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::Void);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let insns = vec![
        ld_lo,
        ld_hi,
        mov_k(2, 0),
        helper_call(BPF_FUNC_MAP_LOOKUP_ELEM),
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let datasecs = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let seed = [InitialReg {
        reg: 6,
        struct_type_id: parent_id,
    }];
    let map = analyze_casts(&insns, &btf, &seed, &[], &datasecs, &[]);
    assert!(
        map.is_empty(),
        "void-value map (`Ptr -> Void`) must keep R0 Unknown: {map:?}"
    );
}
#[test]
fn helper_map_lookup_elem_value_type_struct_via_typedef() {
    // Value member is `Ptr -> Typedef -> Struct`; the typedef layer must
    // be peeled so R0 carries the underlying struct id.
    let (blob, datasec_id, var_off, value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::Typedef);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let insns = vec![
        ld_lo,
        ld_hi,
        mov_k(2, 0),
        helper_call(BPF_FUNC_MAP_LOOKUP_ELEM),
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let datasecs = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let seed = [InitialReg {
        reg: 6,
        struct_type_id: parent_id,
    }];
    let map = analyze_casts(&insns, &btf, &seed, &[], &datasecs, &[]);
    assert_eq!(
        map.get(&(parent_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: value_sid,
            addr_space: AddrSpace::Kernel,
        }),
        "typedef-wrapped value type must peel to the underlying struct id: {map:?}"
    );
}
#[test]
fn helper_map_lookup_elem_no_map_metadata_keeps_r0_unknown() {
    // No DatasecPointer is supplied for the LD_IMM64, so R1 is never
    // typed as a map pointer and the helper return cannot be resolved.
    let (blob, _datasec_id, var_off, _value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::Struct);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let insns = vec![
        ld_lo,
        ld_hi,
        mov_k(2, 0),
        helper_call(BPF_FUNC_MAP_LOOKUP_ELEM),
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let seed = [InitialReg {
        reg: 6,
        struct_type_id: parent_id,
    }];
    let map = analyze_casts(&insns, &btf, &seed, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "without DatasecPointer annotation R1 stays Unknown so the \
         helper-return arm cannot type R0: {map:?}"
    );
}
#[test]
fn helper_not_in_allowlist_keeps_r0_unknown() {
    // Helper id 35 is not bpf_map_lookup_elem: even with a valid `.maps`
    // pointer in R1, its return must stay untyped.
    let (blob, datasec_id, var_off, _value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::Struct);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let insns = vec![
        ld_lo,
        ld_hi,
        mov_k(2, 0),
        helper_call(35),
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let datasecs = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let seed = [InitialReg {
        reg: 6,
        struct_type_id: parent_id,
    }];
    let map = analyze_casts(&insns, &btf, &seed, &[], &datasecs, &[]);
    assert!(
        map.is_empty(),
        "non-bpf_map_lookup_elem helper must not type R0 even with \
         a valid `.maps` R1: {map:?}"
    );
}
#[test]
fn helper_imm_negative_or_zero_keeps_r0_unknown() {
    // Degenerate helper ids — 0 (BPF_FUNC_unspec) and a negative imm —
    // must never be treated as bpf_map_lookup_elem.
    let (blob, datasec_id, var_off, _value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::Struct);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let datasecs = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    // Run the same program with a chosen helper id in the call slot.
    let run = |helper_id: i32| {
        let insns = vec![
            ld_lo,
            ld_hi,
            mov_k(2, 0),
            helper_call(helper_id),
            stx(BPF_SIZE_DW, 6, 0, 8),
            exit(),
        ];
        analyze_casts(
            &insns,
            &btf,
            &[InitialReg {
                reg: 6,
                struct_type_id: parent_id,
            }],
            &[],
            &datasecs,
            &[],
        )
    };
    let map = run(0);
    assert!(
        map.is_empty(),
        "helper id 0 (BPF_FUNC_unspec) must not seed R0: {map:?}"
    );
    let map = run(-1);
    assert!(map.is_empty(), "helper id -1 must not seed R0: {map:?}");
}
#[test]
fn stx_through_helper_returned_pointer_records_finding() {
    // The lookup result is copied R0 -> R7 before the store: the finding
    // must still be recorded through the copy.
    let (blob, datasec_id, var_off, value_sid, parent_id) =
        btf_with_maps_and_task_ctx(MapValueShape::Struct);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let insns = vec![
        ld_lo,
        ld_hi,
        mov_k(2, 0),
        helper_call(BPF_FUNC_MAP_LOOKUP_ELEM),
        mov_x(7, 0),
        stx(BPF_SIZE_DW, 6, 7, 8),
        exit(),
    ];
    let datasecs = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let seed = [InitialReg {
        reg: 6,
        struct_type_id: parent_id,
    }];
    let map = analyze_casts(&insns, &btf, &seed, &[], &datasecs, &[]);
    assert_eq!(
        map.get(&(parent_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: value_sid,
            addr_space: AddrSpace::Kernel,
        }),
        "lookup -> mov -> stx into task_ctx.cgx_raw must record a kernel \
         cast finding: {map:?}"
    );
}
/// Build a BTF blob modelling a `.maps`-section map definition next to a
/// `task_ctx` parent struct, with the map-def's `value` member shaped
/// per `value_kind`.
///
/// Push-order (1-based) type ids: 1 = u64, 2 = struct cbw_cgrp_entry
/// (size 8), 3 = struct task_ctx (size 16, `cgx_raw: u64` at byte 8),
/// 4 = Ptr -> <value shape>, (5 = typedef cbw_cgrp_entry_t, Typedef
/// case only), then the anon map-def struct { type, value }, the
/// `cbw_cgrp_map` Var, and finally the `.maps` Datasec holding it.
///
/// Returns `(blob, datasec_id, var_off, expected_value_struct_id,
/// parent_id)`; `expected_value_struct_id` is 0 for shapes that must
/// NOT resolve to a struct (U64, Void).
fn btf_with_maps_and_task_ctx(value_kind: MapValueShape) -> (Vec<u8>, u32, u32, u32, u32) {
    // String table starts with the mandatory leading NUL byte.
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_entry = push_name(&mut strings, "cbw_cgrp_entry");
    let n_cgx = push_name(&mut strings, "cgx");
    let n_value = push_name(&mut strings, "value");
    let n_type = push_name(&mut strings, "type");
    let n_map_def = push_name(&mut strings, "anon_map_def");
    let n_map_var = push_name(&mut strings, "cbw_cgrp_map");
    let n_maps = push_name(&mut strings, ".maps");
    let n_entry_typedef = push_name(&mut strings, "cbw_cgrp_entry_t");
    let n_task_ctx = push_name(&mut strings, "task_ctx");
    let n_cgx_raw = push_name(&mut strings, "cgx_raw");
    let mut types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct cbw_cgrp_entry { cgx: u64 @ 0 }
        SynType::Struct {
            name_off: n_entry,
            size: 8,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // id 3: struct task_ctx { cgx_raw: u64 @ 8 } — the parent struct
        // the tests store the looked-up pointer into.
        SynType::Struct {
            name_off: n_task_ctx,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx_raw,
                type_id: 1,
                byte_offset: 8,
            }],
        },
    ];
    let parent_id = 3u32;
    // id 4 is always the Ptr used as the map-def `value` member; the
    // second tuple element is the struct id the analysis should resolve
    // (0 when the shape is unresolvable).
    let (value_ptr_id, expected_struct_id) = match value_kind {
        MapValueShape::Struct => {
            types.push(SynType::Ptr { type_id: 2 });
            (4u32, 2u32)
        }
        MapValueShape::U64 => {
            types.push(SynType::Ptr { type_id: 1 });
            (4u32, 0u32)
        }
        MapValueShape::Typedef => {
            // Ptr -> typedef (id 5) -> struct cbw_cgrp_entry (id 2).
            types.push(SynType::Ptr { type_id: 5 });
            types.push(SynType::Typedef {
                name_off: n_entry_typedef,
                type_id: 2,
            });
            (4u32, 2u32)
        }
        MapValueShape::Void => {
            // Ptr -> void (type id 0).
            types.push(SynType::Ptr { type_id: 0 });
            (4u32, 0u32)
        }
    };
    // Type ids are 1-based, so the next pushed type gets len + 1.
    let map_def_id = types.len() as u32 + 1;
    types.push(SynType::Struct {
        name_off: n_map_def,
        size: 16,
        members: vec![
            SynMember {
                name_off: n_type,
                type_id: 1,
                byte_offset: 0,
            },
            SynMember {
                name_off: n_value,
                type_id: value_ptr_id,
                byte_offset: 8,
            },
        ],
    });
    let map_var_id = map_def_id + 1;
    types.push(SynType::Var {
        name_off: n_map_var,
        type_id: map_def_id,
        linkage: 1,
    });
    let datasec_id = map_var_id + 1;
    types.push(SynType::Datasec {
        name_off: n_maps,
        size: 16,
        entries: vec![SynVarSecinfo {
            type_id: map_var_id,
            offset: 0,
            size: 16,
        }],
    });
    let blob = build_btf(&types, &strings);
    // var_off is always 0: the map Var sits at the start of the datasec.
    (blob, datasec_id, 0, expected_struct_id, parent_id)
}
/// Same structural map-def/Var/Datasec shape as
/// `btf_with_maps_and_task_ctx`, but the Var lives in a `.bss` datasec
/// rather than `.maps`: the section-name gate must reject it.
#[test]
fn helper_map_lookup_elem_non_dot_maps_datasec_drops() {
    // String table starts with the mandatory leading NUL byte.
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_entry = push_name(&mut strings, "cbw_cgrp_entry");
    let n_cgx = push_name(&mut strings, "cgx");
    let n_value = push_name(&mut strings, "value");
    let n_type = push_name(&mut strings, "type");
    let n_map_def = push_name(&mut strings, "anon_map_def");
    let n_map_var = push_name(&mut strings, "fake_map");
    let n_bss = push_name(&mut strings, ".bss");
    let n_task_ctx = push_name(&mut strings, "task_ctx");
    let n_cgx_raw = push_name(&mut strings, "cgx_raw");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct cbw_cgrp_entry { cgx: u64 @ 0 }
        SynType::Struct {
            name_off: n_entry,
            size: 8,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // id 3: struct task_ctx { cgx_raw: u64 @ 8 } — parent struct.
        SynType::Struct {
            name_off: n_task_ctx,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx_raw,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // id 4: Ptr -> cbw_cgrp_entry (a resolvable value shape).
        SynType::Ptr { type_id: 2 },
        // id 5: anon map-def { type @ 0, value: Ptr @ 8 }.
        SynType::Struct {
            name_off: n_map_def,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_type,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_value,
                    type_id: 4,
                    byte_offset: 8,
                },
            ],
        },
        // id 6: Var "fake_map" of the map-def type.
        SynType::Var {
            name_off: n_map_var,
            type_id: 5,
            linkage: 1,
        },
        // id 7: Datasec named ".bss" (NOT ".maps") holding the Var.
        SynType::Datasec {
            name_off: n_bss,
            size: 16,
            entries: vec![SynVarSecinfo {
                type_id: 6,
                offset: 0,
                size: 16,
            }],
        },
    ];
    let parent_id = 3u32;
    let datasec_id = 7u32;
    let var_off = 0u32;
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let [ld_lo, ld_hi] = ld_imm64(1, var_off as i32);
    let mov_key = mov_k(2, 0);
    let call_lookup = helper_call(BPF_FUNC_MAP_LOOKUP_ELEM);
    let stx_kptr = stx(BPF_SIZE_DW, 6, 0, 8);
    let insns = vec![ld_lo, ld_hi, mov_key, call_lookup, stx_kptr, exit()];
    let datasec_pointers = vec![DatasecPointer {
        insn_offset: 0,
        datasec_type_id: datasec_id,
        base_offset: var_off,
    }];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: parent_id,
        }],
        &[],
        &datasec_pointers,
        &[],
    );
    assert!(
        map.is_empty(),
        "non-`.maps` datasec must not drive the helper-return arm even \
         with a structurally matching map-def shape: {map:?}"
    );
}
#[test]
fn empty_access_pattern_does_not_trigger_conflict_with_kptr() {
    // `r3 = *(r1 + 8)` loads the slot but never derefs the result, so
    // the access pattern is empty; the kptr-style STX of Q into the same
    // slot must survive conflict detection.
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![
        ldx(BPF_SIZE_DW, 3, 1, 8),
        stx(BPF_SIZE_DW, 1, 5, 8),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 5,
            struct_type_id: q_id,
        },
    ];
    let map = analyze_casts(&insns, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.contains_key(&(t_id, 8)),
        "kptr finding on slot with empty-access pattern (LDX without deref) \
         must NOT be dropped by conflict detection: {map:?}"
    );
}
#[test]
fn only_ld_imm64_no_oob() {
    // A stream that is nothing but LD_IMM64 pairs exercises two-slot
    // instruction decoding without running past the end of the program.
    const N_PAIRS: usize = 50;
    let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let lo = mk_insn(BPF_CLASS_LD | BPF_SIZE_DW | BPF_MODE_IMM, 2, 0, 0, 0);
    let hi = mk_insn(0, 0, 0, 0, 0);
    let mut insns: Vec<BpfInsn> = Vec::with_capacity(2 * N_PAIRS + 1);
    for _ in 0..N_PAIRS {
        insns.extend([lo, hi]);
    }
    insns.push(exit());
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.is_empty(),
        "all-LD_IMM64 stream must produce no findings, no OOB panic: {map:?}"
    );
}
#[test]
fn arena_stx_pending_then_duplicate_is_idempotent() {
    // Two identical arena STX writes to the same (T, 8) slot must merge
    // idempotently rather than being flagged as conflicting.
    let (blob, t_id, _q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let alloc_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    let store = stx(BPF_SIZE_DW, 6, 0, 8);
    let insns = vec![alloc_call, store, store, exit()];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    assert!(
        !map.is_empty(),
        "duplicate STX to same slot must not conflict; map: {map:?}"
    );
}
#[test]
fn three_way_conflict_arena_kptr_pattern_drops_all() {
    // Slot (T, 8) receives three disagreeing signals — an arena cast, a
    // kptr-style STX of Q, and a deref access pattern — so conflict
    // resolution must drop the slot entirely.
    let (blob, t_id, q_id) = btf_with_source_and_target(8, 0);
    let btf = Btf::from_bytes(&blob).unwrap();
    let insns = vec![
        ldx(BPF_SIZE_DW, 2, 1, 8),
        addr_space_cast(3, 2, 1),
        stx(BPF_SIZE_DW, 1, 3, 8),
        stx(BPF_SIZE_DW, 1, 5, 8),
        ldx(BPF_SIZE_DW, 6, 1, 8),
        ldx(BPF_SIZE_DW, 7, 6, 0),
        exit(),
    ];
    let seeds = [
        InitialReg {
            reg: 1,
            struct_type_id: t_id,
        },
        InitialReg {
            reg: 5,
            struct_type_id: q_id,
        },
    ];
    let map = analyze_casts(&insns, &btf, &seeds, &[], &[], &[]);
    assert!(
        map.is_empty(),
        "arena + kptr + pattern on same slot must all drop: {map:?}"
    );
}
/// `T.history` is `u64[4]` at byte 0; a byte-16 access lands on the
/// third array element, which `struct_member_at` must peel to `u64` so
/// the cast/deref at that slot is keyed (T, 16).
#[test]
fn struct_member_at_resolves_array_element_offset() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_q = push_name(&mut strings, "Q");
    let n_history = push_name(&mut strings, "history");
    let n_x = push_name(&mut strings, "x");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: u64[4]
        SynType::Array {
            type_id: 1,
            index_type_id: 1,
            nelems: 4,
        },
        // id 3: struct T { history: u64[4] @ 0 } (size 32)
        SynType::Struct {
            name_off: n_t,
            size: 32,
            members: vec![SynMember {
                name_off: n_history,
                type_id: 2,
                byte_offset: 0,
            }],
        },
        // id 4: struct Q { x: u64 @ 0 } — the expected cast target.
        SynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let t_id = 3u32;
    let q_id = 4u32;
    let insns = vec![
        // r2 = *(r1 + 16) — load history[2]
        ldx(BPF_SIZE_DW, 2, 1, 16),
        // address-space cast producing r4 from r2 (args assumed
        // (dst, src, dir) — consistent with the other tests here).
        addr_space_cast(4, 2, 1),
        // *(r1 + 16) = r4 — store the cast pointer back into the slot.
        stx(BPF_SIZE_DW, 1, 4, 16),
        // r3 = *(r4 + 0) — deref through the cast pointer.
        ldx(BPF_SIZE_DW, 3, 4, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        map.get(&(t_id, 16)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "(T={t_id}, 16) — third u64 element of `history[4]` — must \
         appear in the cast map with target=Q ({q_id}) after \
         struct_member_at peels the array member type to `u64`: \
         {map:?}"
    );
}
/// Arena pointers stored into `Outer.inner.cgx_raw` / `.llcx_raw` must
/// be keyed on the embedded `Inner` struct, never the containing
/// `Outer`.
#[test]
fn stx_nested_struct_arena_finding_keys_on_inner() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_inner = push_name(&mut strings, "Inner");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    let n_llcx = push_name(&mut strings, "llcx_raw");
    let n_outer = push_name(&mut strings, "Outer");
    let n_inner_field = push_name(&mut strings, "inner");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct Inner { cgx_raw @ 0, llcx_raw @ 8 } (size 16)
        SynType::Struct {
            name_off: n_inner,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_cgx,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_llcx,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
        // id 3: struct Outer { inner: Inner @ 0 } (size 16)
        SynType::Struct {
            name_off: n_outer,
            size: 16,
            members: vec![SynMember {
                name_off: n_inner_field,
                type_id: 2,
                byte_offset: 0,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let inner_id: u32 = 2;
    let outer_id: u32 = 3;
    // Registered below as an arena allocator (SubprogReturn at insn 0).
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    let insns = vec![
        pseudo_call,
        // *(r6 + 0) = r0 — alloc result into Outer.inner.cgx_raw
        stx(BPF_SIZE_DW, 6, 0, 0),
        // r7 = r0
        mov_x(7, 0),
        // *(r6 + 8) = r7 — the copy into Outer.inner.llcx_raw
        stx(BPF_SIZE_DW, 6, 7, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: outer_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    assert_eq!(
        map.get(&(inner_id, 0)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "nested struct STX must key on (Inner, 0) not (Outer, 0): {map:?}"
    );
    assert_eq!(
        map.get(&(inner_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "nested struct STX must key on (Inner, 8) not (Outer, 8): {map:?}"
    );
    assert!(
        !map.contains_key(&(outer_id, 0)),
        "outer struct id must NOT appear as key: {map:?}"
    );
    assert!(
        !map.contains_key(&(outer_id, 8)),
        "outer struct id must NOT appear as key: {map:?}"
    );
}
/// A load+deref through `Outer.embed.ptr_field` must key the finding on
/// the embedded `Inner` struct, not the containing `Outer`.
#[test]
fn ldx_nested_struct_loads_inner_key() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_inner = push_name(&mut strings, "Inner");
    let n_field = push_name(&mut strings, "ptr_field");
    let n_outer = push_name(&mut strings, "Outer");
    let n_embed = push_name(&mut strings, "embed");
    let n_target = push_name(&mut strings, "Target");
    let n_x = push_name(&mut strings, "x");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct Inner { ptr_field: u64 @ 8 } (size 16)
        SynType::Struct {
            name_off: n_inner,
            size: 16,
            members: vec![SynMember {
                name_off: n_field,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // id 3: struct Outer { embed: Inner @ 0 } (size 16)
        SynType::Struct {
            name_off: n_outer,
            size: 16,
            members: vec![SynMember {
                name_off: n_embed,
                type_id: 2,
                byte_offset: 0,
            }],
        },
        // id 4: struct Target { x: u64 @ 0 } (size 8) — a candidate
        // shape available to the analysis.
        SynType::Struct {
            name_off: n_target,
            size: 8,
            members: vec![SynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let inner_id: u32 = 2;
    let outer_id: u32 = 3;
    let insns = vec![
        // r2 = *(r1 + 8) — Outer.embed.ptr_field
        ldx(BPF_SIZE_DW, 2, 1, 8),
        // address-space cast of r2 in place.
        addr_space_cast(2, 2, 1),
        // r3 = *(r2 + 0) — deref through the cast pointer.
        ldx(BPF_SIZE_DW, 3, 2, 0),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: outer_id,
        }],
        &[],
        &[],
        &[],
    );
    assert!(
        map.contains_key(&(inner_id, 8)),
        "nested LDX + deref must key on (Inner={inner_id}, 8) \
         not (Outer={outer_id}, 8): {map:?}"
    );
    assert!(
        !map.contains_key(&(outer_id, 8)),
        "outer id must NOT appear as key for nested member: {map:?}"
    );
}
/// The callee declares its parameter as plain `u64`, but the caller
/// passes a `Pointer{M}` in R1: cross-function propagation must let the
/// callee's STX record the arena finding against M.
#[test]
fn cross_function_u64_param_inherits_caller_pointer_type() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_m = push_name(&mut strings, "M");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    let n_caller = push_name(&mut strings, "caller");
    let n_callee = push_name(&mut strings, "callee");
    let n_taskc_raw = push_name(&mut strings, "taskc_raw");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct M { cgx_raw: u64 @ 8 } (size 16)
        SynType::Struct {
            name_off: n_m,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // id 3: proto fn(taskc_raw: u64) — shared by both Funcs below.
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![SynParam {
                name_off: n_taskc_raw,
                type_id: 1,
            }],
        },
        // id 4: callee, id 5: caller (both use proto 3).
        SynType::Func {
            name_off: n_callee,
            type_id: 3,
            linkage: 1,
        },
        SynType::Func {
            name_off: n_caller,
            type_id: 3,
            linkage: 1,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let m_id = 2;
    let insns = vec![
        // 0: call +2 -> subprog entry at insn 3 (R1 = Pointer{M} seed).
        mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 2),
        exit(),
        exit(),
        // 3: subprog body — r6 = r1 (the u64-declared parameter).
        mov_x(6, 1),
        // 4: arena-alloc subprog call (SubprogReturn below, offset 4).
        mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0),
        // 5: *(r6 + 8) = r0 — arena pointer into M.cgx_raw.
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: m_id,
        }],
        &[FuncEntry {
            insn_offset: 3,
            func_proto_id: 3,
        }],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 4,
        }],
    );
    assert_eq!(
        map.get(&(m_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "cross-function u64 param must inherit caller's Pointer{{M}} \
         and record arena STX at (M, 8): {map:?}"
    );
}
/// An arena pointer spilled into the stack buffer passed to
/// `bpf_map_update_elem` must flow through the map value: a later
/// `bpf_map_lookup_elem` + LDX of `V.field` + STX into `P.field` must
/// surface `(P, 0) -> Arena`.
#[test]
fn helper_map_update_then_lookup_propagates_arena_through_map_value() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_v = push_name(&mut strings, "V");
    let n_v_field = push_name(&mut strings, "field");
    let n_p = push_name(&mut strings, "P");
    let n_p_field = push_name(&mut strings, "field");
    let n_type = push_name(&mut strings, "type");
    let n_value = push_name(&mut strings, "value");
    let n_map_def = push_name(&mut strings, "anon_map_def");
    let n_map_var = push_name(&mut strings, "the_map");
    let n_maps = push_name(&mut strings, ".maps");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct V { field: u64 @ 8 } (size 16) — the map value.
        SynType::Struct {
            name_off: n_v,
            size: 16,
            members: vec![SynMember {
                name_off: n_v_field,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // id 3: struct P { field: u64 @ 0 } (size 8) — final STX target.
        SynType::Struct {
            name_off: n_p,
            size: 8,
            members: vec![SynMember {
                name_off: n_p_field,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // id 4: Ptr -> V.
        SynType::Ptr { type_id: 2 },
        // id 5: anon map-def { type @ 0, value: Ptr -> V @ 8 }.
        SynType::Struct {
            name_off: n_map_def,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_type,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_value,
                    type_id: 4,
                    byte_offset: 8,
                },
            ],
        },
        // id 6: Var "the_map", id 7: `.maps` Datasec holding it.
        SynType::Var {
            name_off: n_map_var,
            type_id: 5,
            linkage: 1,
        },
        SynType::Datasec {
            name_off: n_maps,
            size: 16,
            entries: vec![SynVarSecinfo {
                type_id: 6,
                offset: 0,
                size: 16,
            }],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let v_id = 2u32;
    let p_id = 3u32;
    let datasec_id = 7u32;
    let var_off = 0u32;
    // Arena allocator (SubprogReturn at insn 0).
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    // Spill r0 to fp-16. The update's value pointer is fp-24, so the
    // spill lands at value-offset 8 == V.field.
    let stx_spill = stx(BPF_SIZE_DW, 10, 0, -16);
    let [ld_lo_pre, ld_hi_pre] = ld_imm64(1, var_off as i32);
    let mov_r2_from_r10 = mov_x(2, 10);
    let r2_minus_24 = mk_insn(BPF_CLASS_ALU64 | (bs::BPF_ADD as u8), 2, 0, 0, -24);
    let mov_r3_from_r10 = mov_x(3, 10);
    let r3_minus_24 = mk_insn(BPF_CLASS_ALU64 | (bs::BPF_ADD as u8), 3, 0, 0, -24);
    // Helper id 2 = bpf_map_update_elem.
    let call_update = helper_call(2);
    let [ld_lo_post, ld_hi_post] = ld_imm64(1, var_off as i32);
    let call_lookup = helper_call(BPF_FUNC_MAP_LOOKUP_ELEM);
    // r2 = *(r0 + 8) — V.field out of the looked-up value.
    let ldx_v_field = ldx(BPF_SIZE_DW, 2, 0, 8);
    // *(r6 + 0) = r2 — into P.field.
    let stx_into_p = stx(BPF_SIZE_DW, 6, 2, 0);
    let insns = vec![
        pseudo_call,
        stx_spill,
        ld_lo_pre,
        ld_hi_pre,
        mov_r2_from_r10,
        r2_minus_24,
        mov_r3_from_r10,
        r3_minus_24,
        call_update,
        ld_lo_post,
        ld_hi_post,
        call_lookup,
        ldx_v_field,
        stx_into_p,
        exit(),
    ];
    // Both LD_IMM64s (insn 2 and insn 9) reference the same map var.
    let datasec_pointers = vec![
        DatasecPointer {
            insn_offset: 2,
            datasec_type_id: datasec_id,
            base_offset: var_off,
        },
        DatasecPointer {
            insn_offset: 9,
            datasec_type_id: datasec_id,
            base_offset: var_off,
        },
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: p_id,
        }],
        &[],
        &datasec_pointers,
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    let _ = v_id;
    assert_eq!(
        map.get(&(p_id, 0)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "map-value arena propagation must surface `(P, 0) -> Arena` \
         after update_elem(&V_with_arena_at_off_8) -> lookup_elem -> \
         LDX V.field -> STX into P.field: {map:?}"
    );
}
#[test]
fn cross_function_fixpoint_callee_before_caller() {
    // The callee lives at a LOWER pc (insn 0) than its caller (insn 2):
    // the analysis must iterate to a fixpoint so the caller's argument
    // types reach the callee even though the callee is scanned first.
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_parent = push_name(&mut strings, "Parent");
    let n_field = push_name(&mut strings, "arena_field");
    let n_caller = push_name(&mut strings, "caller");
    let n_callee = push_name(&mut strings, "callee");
    let n_p1 = push_name(&mut strings, "parent_raw");
    let n_p2 = push_name(&mut strings, "val_raw");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct Parent { arena_field: u64 @ 8 } (size 16)
        SynType::Struct {
            name_off: n_parent,
            size: 16,
            members: vec![SynMember {
                name_off: n_field,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // id 3: Ptr -> Parent
        SynType::Ptr { type_id: 2 },
        // id 4: proto fn(parent_raw: u64, val_raw: u64)
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![
                SynParam {
                    name_off: n_p1,
                    type_id: 1,
                },
                SynParam {
                    name_off: n_p2,
                    type_id: 1,
                },
            ],
        },
        // id 5: callee (proto 4)
        SynType::Func {
            name_off: n_callee,
            type_id: 4,
            linkage: 1,
        },
        // id 6: proto fn(parent_raw: *Parent)
        SynType::FuncProto {
            return_type_id: 0,
            params: vec![SynParam {
                name_off: n_p1,
                type_id: 3,
            }],
        },
        // id 7: caller (proto 6)
        SynType::Func {
            name_off: n_caller,
            type_id: 6,
            linkage: 1,
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let parent_id: u32 = 2;
    let alloc_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    // imm = -7: from insn 6 the target is 6 + (-7) + 1 = 0, the callee.
    let callee_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, -7);
    let insns = vec![
        // callee at pc 0: *(r1 + 8) = r2, then return.
        stx(BPF_SIZE_DW, 1, 2, 8),
        exit(),
        // caller at pc 2: save r1, alloc, marshal args, call callee.
        mov_x(6, 1),
        alloc_call,
        mov_x(2, 0),
        mov_x(1, 6),
        callee_call,
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[],
        &[
            FuncEntry {
                insn_offset: 0,
                func_proto_id: 4,
            },
            FuncEntry {
                insn_offset: 2,
                func_proto_id: 6,
            },
        ],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 3,
        }],
    );
    assert_eq!(
        map.get(&(parent_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        }),
        "fixpoint must propagate caller's [Pointer{{Parent}}, ArenaU64FromAlloc] \
         into callee at lower PC, producing (Parent, 8) -> Arena: {map:?}"
    );
}
/// Q and R have identical layouts (u64 @ 0 and @ 8, size 16), so shape
/// inference over the observed derefs cannot choose between them: the
/// arena finding must keep `target_type_id = 0` (deferred resolve)
/// instead of picking one arbitrarily.
#[test]
fn finalize_arena_stx_emits_deferred_resolve_when_shape_inference_ambiguous() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_p = push_name(&mut strings, "P");
    let n_q = push_name(&mut strings, "Q");
    let n_r = push_name(&mut strings, "R");
    let n_f = push_name(&mut strings, "f");
    let n_a = push_name(&mut strings, "a");
    let n_b = push_name(&mut strings, "b");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct P { f: u64 @ 8 } (size 16) — the slot owner.
        SynType::Struct {
            name_off: n_p,
            size: 16,
            members: vec![SynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // id 3: struct Q { a @ 0, b @ 8 } (size 16)
        SynType::Struct {
            name_off: n_q,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
        // id 4: struct R — layout-identical twin of Q.
        SynType::Struct {
            name_off: n_r,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let p_id = 2;
    // Arena allocator (SubprogReturn at insn 4).
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    let insns = vec![
        // r2 = *(r1 + 8); r3 = *(r2 + 0); r4 = *(r2 + 8) — derefs that
        // match BOTH Q and R.
        ldx(BPF_SIZE_DW, 2, 1, 8),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        ldx(BPF_SIZE_DW, 4, 2, 8),
        mov_x(6, 1),
        pseudo_call,
        // *(r6 + 8) = r0 — arena pointer into P.f.
        stx(BPF_SIZE_DW, 6, 0, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 4,
        }],
    );
    let hit = map
        .get(&(p_id, 8))
        .expect("(P, 8) must be in CastMap even when shape inference is ambiguous");
    assert_eq!(
        hit.addr_space,
        AddrSpace::Arena,
        "ambiguous-shape STX-flow tag must still emit AddrSpace::Arena: {map:?}"
    );
    assert_eq!(
        hit.target_type_id, 0,
        "ambiguous shape (Q and R both 16-byte u64@0+u64@8) must yield \
         target_type_id=0 (deferred resolve via MemReader::resolve_arena_type \
         bridge at chase time). target_type_id={} suggests one of Q/R was \
         picked arbitrarily — false-positive render of the wrong struct \
         shape: {map:?}",
        hit.target_type_id
    );
    assert!(
        hit.target_type_id != 3 && hit.target_type_id != 4,
        "target_type_id={} must NOT be one of the ambiguous candidates Q (3) or R (4); \
         picking one arbitrarily would render the slot's payload against the wrong \
         struct shape at chase time: {map:?}",
        hit.target_type_id
    );
}
/// The arena STX fires BEFORE the derefs that reveal the slot's pointee
/// shape; post-pass shape inference must still resolve the target to Q
/// (regression test for alias-tagged registers dropping accesses).
#[test]
fn stx_flow_stx_before_deref_resolves_target_via_shape_inference() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = push_name(&mut strings, "u64");
    let n_p = push_name(&mut strings, "P");
    let n_q = push_name(&mut strings, "Q");
    let n_cgx = push_name(&mut strings, "cgx_raw");
    let n_a = push_name(&mut strings, "a");
    let n_b = push_name(&mut strings, "b");
    let types = vec![
        // id 1: u64
        SynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        // id 2: struct P { cgx_raw: u64 @ 8 } (size 16) — slot owner.
        SynType::Struct {
            name_off: n_p,
            size: 16,
            members: vec![SynMember {
                name_off: n_cgx,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        // id 3: struct Q { a @ 0, b @ 8 } (size 16) — the unique shape
        // matching the derefs below.
        SynType::Struct {
            name_off: n_q,
            size: 16,
            members: vec![
                SynMember {
                    name_off: n_a,
                    type_id: 1,
                    byte_offset: 0,
                },
                SynMember {
                    name_off: n_b,
                    type_id: 1,
                    byte_offset: 8,
                },
            ],
        },
    ];
    let blob = build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).unwrap();
    let p_id = 2;
    let q_id = 3;
    // Arena allocator (SubprogReturn at insn 0).
    let pseudo_call = mk_insn(BPF_CLASS_JMP | BPF_OP_CALL, 0, BPF_PSEUDO_CALL, 0, 0);
    let insns = vec![
        pseudo_call,
        // *(r6 + 8) = r0 — arena STX BEFORE any deref of the slot.
        stx(BPF_SIZE_DW, 6, 0, 8),
        // r2 = *(r6 + 8); r3 = *(r2 + 0); r4 = *(r2 + 8) — derefs that
        // pin the pointee shape to Q.
        ldx(BPF_SIZE_DW, 2, 6, 8),
        ldx(BPF_SIZE_DW, 3, 2, 0),
        ldx(BPF_SIZE_DW, 4, 2, 8),
        exit(),
    ];
    let map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 6,
            struct_type_id: p_id,
        }],
        &[],
        &[],
        &[SubprogReturn {
            alloc_size: None,
            insn_offset: 0,
        }],
    );
    let hit = map
        .get(&(p_id, 8))
        .expect("(P, 8) must be in CastMap — STX-flow gates emission");
    assert_eq!(
        hit.addr_space,
        AddrSpace::Arena,
        "STX-flow tag must yield AddrSpace::Arena: {map:?}"
    );
    assert_eq!(
        hit.target_type_id, q_id,
        "post-fix shape inference must resolve target_type_id=q_id ({q_id}) \
         even when the STX-flow tag fires BEFORE the deref pattern \
         (pre-fix bug: alias-tagged register dropped accesses, leaving \
         target_type_id=0): {map:?}"
    );
}