use super::*;
use crate::monitor::cast_analysis::{AddrSpace, CastHit};
use goblin::elf::header as h;
use goblin::elf::section_header as sh;
use goblin::elf::sym as syms;
use std::io::Write;
/// Declarative spec for one ELF section to be emitted by `build_elf64`.
struct SecSpec {
    name: &'static str,
    sh_type: u32,
    sh_flags: u64,
    sh_addr: u64,
    data: Vec<u8>,
    sh_link: u32,
    sh_info: u32,
    sh_entsize: u64,
}

impl SecSpec {
    /// Start a spec with just a name and section type; all other header
    /// fields default to zero and the payload is empty.
    fn new(name: &'static str, sh_type: u32) -> Self {
        Self {
            name,
            sh_type,
            sh_flags: 0,
            sh_addr: 0,
            data: Vec::new(),
            sh_link: 0,
            sh_info: 0,
            sh_entsize: 0,
        }
    }

    /// Set `sh_flags`.
    fn flags(self, f: u64) -> Self {
        Self { sh_flags: f, ..self }
    }

    /// Set the section payload.
    fn data(self, d: Vec<u8>) -> Self {
        Self { data: d, ..self }
    }

    /// Set `sh_link` (e.g. a symtab's associated strtab index).
    fn link(self, l: u32) -> Self {
        Self { sh_link: l, ..self }
    }

    /// Set `sh_info`.
    fn info(self, i: u32) -> Self {
        Self { sh_info: i, ..self }
    }

    /// Set `sh_entsize` (e.g. 24 for an ELF64 symtab).
    fn entsize(self, e: u64) -> Self {
        Self { sh_entsize: e, ..self }
    }
}
/// Build a minimal but well-formed ELF64 little-endian image from `sections`.
///
/// Layout: 64-byte Ehdr, then each section's payload back-to-back, then the
/// `.shstrtab` bytes, then the section-header table (SHT_NULL + caller
/// sections + `.shstrtab`). No program headers are emitted.
///
/// Fix: the two `for` loops previously read `§ions` — HTML-entity mojibake
/// of `&sections` (`&sect;` decoded to `§`) — which does not compile.
fn build_elf64(sections: Vec<SecSpec>, e_machine: u16, e_type: u16) -> Vec<u8> {
    // Section-header string table: index 0 is the mandatory leading NUL,
    // followed by each caller section's name, then ".shstrtab" itself.
    let mut shstrtab: Vec<u8> = vec![0u8];
    let null_name_off = 0u32;
    let mut sec_name_offs: Vec<u32> = Vec::new();
    for s in &sections {
        sec_name_offs.push(shstrtab.len() as u32);
        shstrtab.extend_from_slice(s.name.as_bytes());
        shstrtab.push(0);
    }
    let shstrtab_self_name_off = shstrtab.len() as u32;
    shstrtab.extend_from_slice(b".shstrtab");
    shstrtab.push(0);

    let ehdr_size: usize = 64; // sizeof(Elf64_Ehdr)
    let shdr_size: usize = 64; // sizeof(Elf64_Shdr)

    // Lay out section payloads contiguously, starting right after the Ehdr;
    // record each section's file offset as we go.
    let mut data_blob: Vec<u8> = Vec::new();
    let mut sec_file_off: Vec<u64> = Vec::new();
    sec_file_off.push(0); // the SHT_NULL section has no data
    let mut cursor: u64 = ehdr_size as u64;
    for s in &sections {
        sec_file_off.push(cursor);
        data_blob.extend_from_slice(&s.data);
        cursor += s.data.len() as u64;
    }
    let shstrtab_file_off = cursor;
    data_blob.extend_from_slice(&shstrtab);
    cursor += shstrtab.len() as u64;
    let shoff = cursor; // section-header table follows all payloads

    // NULL entry + caller sections + .shstrtab.
    let shnum = (1 + sections.len() + 1) as u16;
    let shstrndx = (1 + sections.len()) as u16;

    // e_ident: magic, 64-bit class, little-endian, current version,
    // OSABI 0, ABI version 0, 7 bytes of padding.
    let mut blob: Vec<u8> = Vec::with_capacity(ehdr_size);
    blob.extend_from_slice(h::ELFMAG);
    blob.push(h::ELFCLASS64);
    blob.push(h::ELFDATA2LSB);
    blob.push(h::EV_CURRENT);
    blob.push(0); // EI_OSABI
    blob.push(0); // EI_ABIVERSION
    blob.extend_from_slice(&[0u8; 7]); // e_ident padding
    blob.extend_from_slice(&e_type.to_le_bytes());
    blob.extend_from_slice(&e_machine.to_le_bytes());
    blob.extend_from_slice(&1u32.to_le_bytes()); // e_version
    blob.extend_from_slice(&0u64.to_le_bytes()); // e_entry
    blob.extend_from_slice(&0u64.to_le_bytes()); // e_phoff (no program headers)
    blob.extend_from_slice(&shoff.to_le_bytes()); // e_shoff
    blob.extend_from_slice(&0u32.to_le_bytes()); // e_flags
    blob.extend_from_slice(&(ehdr_size as u16).to_le_bytes()); // e_ehsize
    blob.extend_from_slice(&0u16.to_le_bytes()); // e_phentsize
    blob.extend_from_slice(&0u16.to_le_bytes()); // e_phnum
    blob.extend_from_slice(&(shdr_size as u16).to_le_bytes()); // e_shentsize
    blob.extend_from_slice(&shnum.to_le_bytes());
    blob.extend_from_slice(&shstrndx.to_le_bytes());
    blob.extend_from_slice(&data_blob);

    // Append one Elf64_Shdr (fields in on-disk order).
    let mut write_shdr = |sh_name: u32,
                          sh_type: u32,
                          sh_flags: u64,
                          sh_addr: u64,
                          sh_offset: u64,
                          sh_size: u64,
                          sh_link: u32,
                          sh_info: u32,
                          sh_addralign: u64,
                          sh_entsize: u64| {
        blob.write_all(&sh_name.to_le_bytes()).unwrap();
        blob.write_all(&sh_type.to_le_bytes()).unwrap();
        blob.write_all(&sh_flags.to_le_bytes()).unwrap();
        blob.write_all(&sh_addr.to_le_bytes()).unwrap();
        blob.write_all(&sh_offset.to_le_bytes()).unwrap();
        blob.write_all(&sh_size.to_le_bytes()).unwrap();
        blob.write_all(&sh_link.to_le_bytes()).unwrap();
        blob.write_all(&sh_info.to_le_bytes()).unwrap();
        blob.write_all(&sh_addralign.to_le_bytes()).unwrap();
        blob.write_all(&sh_entsize.to_le_bytes()).unwrap();
    };
    write_shdr(null_name_off, sh::SHT_NULL, 0, 0, 0, 0, 0, 0, 0, 0);
    for (i, s) in sections.iter().enumerate() {
        write_shdr(
            sec_name_offs[i],
            s.sh_type,
            s.sh_flags,
            s.sh_addr,
            sec_file_off[i + 1],
            s.data.len() as u64,
            s.sh_link,
            s.sh_info,
            1,
            s.sh_entsize,
        );
    }
    write_shdr(
        shstrtab_self_name_off,
        sh::SHT_STRTAB,
        0,
        0,
        shstrtab_file_off,
        shstrtab.len() as u64,
        0,
        0,
        1,
        0,
    );
    blob
}
/// Encode one little-endian Elf64_Sym:
/// name(4) | info(1) | other(1) | shndx(2) | value(8) | size(8) = 24 bytes.
fn elf64_sym(st_name: u32, st_info: u8, st_shndx: u16, st_value: u64, st_size: u64) -> [u8; 24] {
    let mut sym = [0u8; 24];
    sym[0..4].copy_from_slice(&st_name.to_le_bytes());
    sym[4] = st_info;
    // sym[5] (st_other) stays 0: default visibility.
    sym[6..8].copy_from_slice(&st_shndx.to_le_bytes());
    sym[8..16].copy_from_slice(&st_value.to_le_bytes());
    sym[16..24].copy_from_slice(&st_size.to_le_bytes());
    sym
}
/// Pack an ELF `st_info` byte: binding in the high nibble, symbol type in
/// the low nibble.
fn st_info(bind: u8, ty: u8) -> u8 {
    (ty & 0x0f) | (bind << 4)
}
/// Assemble a minimal BTF blob: 24-byte header, then the raw type section,
/// then the string section. `str_off` equals `type_len` because the strings
/// immediately follow the types.
fn build_btf_blob(types: &[u8], strings: &[u8]) -> Vec<u8> {
    let type_len = types.len() as u32;
    let str_len = strings.len() as u32;
    let mut blob = Vec::with_capacity(24 + types.len() + strings.len());
    blob.extend_from_slice(&0xEB9F_u16.to_le_bytes()); // magic
    blob.push(1); // version
    blob.push(0); // flags
    blob.extend_from_slice(&24u32.to_le_bytes()); // hdr_len
    blob.extend_from_slice(&0u32.to_le_bytes()); // type_off
    blob.extend_from_slice(&type_len.to_le_bytes()); // type_len
    blob.extend_from_slice(&type_len.to_le_bytes()); // str_off
    blob.extend_from_slice(&str_len.to_le_bytes()); // str_len
    blob.extend_from_slice(types);
    blob.extend_from_slice(strings);
    blob
}
#[test]
fn cached_cast_analysis_nonexistent_path_returns_none() {
    // A path that does not exist on disk must yield None.
    let missing = std::path::Path::new(
        "/tmp/ktstr-cast-analysis-nonexistent-fixture-path-do-not-create",
    );
    assert!(
        !missing.exists(),
        "fixture path must not exist; remove it before running this test"
    );
    assert!(cached_cast_analysis_for_scheduler(missing).is_none());
}
#[test]
fn cached_cast_analysis_empty_file_returns_none() {
    // A zero-length file cannot be an ELF; analysis must return None.
    let tmp = tempfile::tempdir().expect("tempdir");
    let path = tmp.path().join("empty.bin");
    std::fs::write(&path, b"").expect("write empty file");
    assert!(cached_cast_analysis_for_scheduler(&path).is_none());
}
#[test]
fn cached_cast_analysis_no_bpf_objs_section_returns_none() {
    // A valid ELF without a `.bpf.objs` section yields no analysis.
    let sections =
        vec![SecSpec::new(".text", sh::SHT_PROGBITS).flags(sh::SHF_EXECINSTR.into())];
    let blob = build_elf64(sections, h::EM_X86_64, h::ET_REL);
    let tmp = tempfile::tempdir().expect("tempdir");
    let path = tmp.path().join("no_bpf_objs.elf");
    std::fs::write(&path, &blob).expect("write");
    assert!(cached_cast_analysis_for_scheduler(&path).is_none());
}
#[test]
fn btf_str_at_empty_returns_none() {
    // Blobs shorter than the 24-byte BTF header can never resolve a string.
    for blob in [&[][..], &[0u8; 23][..]] {
        assert!(btf_str_at(blob, 0).is_none());
    }
}
#[test]
fn btf_str_at_offset_past_strtab_returns_none() {
    // Offset 100 lies far beyond the 6-byte string section.
    let blob = build_btf_blob(&[], b"\0abc\0\0");
    assert!(btf_str_at(&blob, 100).is_none());
}
#[test]
fn btf_str_at_offset_at_boundary_returns_none() {
    // An offset equal to the string-section length is one past the end.
    let table = b"\0abc\0";
    let blob = build_btf_blob(&[], table);
    assert!(btf_str_at(&blob, table.len() as u32).is_none());
}
#[test]
fn btf_str_at_no_null_terminator_invalid_utf8_returns_none() {
    // 0xFF bytes with no NUL terminator must be rejected
    // (unterminated and invalid UTF-8).
    let blob = build_btf_blob(&[], &[0u8, 0xff, 0xff]);
    assert!(btf_str_at(&blob, 1).is_none());
}
#[test]
fn btf_str_at_valid_returns_string() {
    // Offsets index into the string section; offset 0 is the mandatory
    // empty string.
    let blob = build_btf_blob(&[], b"\0hello\0world\0");
    for (off, want) in [(0u32, ""), (1, "hello"), (7, "world")] {
        assert_eq!(btf_str_at(&blob, off), Some(want));
    }
}
#[test]
fn parse_btf_ext_too_short_returns_empty() {
    // Anything shorter than the 24-byte .BTF.ext header must be rejected.
    let btf_bytes = build_btf_blob(&[], b"\0");
    let blob = build_elf64(vec![], h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bases = HashMap::new();
    // 0 and 23 bytes: both below the minimum header size.
    for short_len in [0usize, 23] {
        let data = vec![0u8; short_len];
        let out = parse_btf_ext_func_entries(&data, &btf_bytes, &elf, &bases);
        assert!(out.is_empty(), "len={short_len}");
    }
}
#[test]
fn parse_btf_ext_wrong_magic_returns_empty() {
    // A full-size header whose magic is not 0xEB9F must yield no entries.
    let btf_bytes = build_btf_blob(&[], b"\0");
    let blob = build_elf64(vec![], h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bases = HashMap::new();
    let mut ext = vec![0u8; 24];
    ext[0..2].copy_from_slice(&0xDEADu16.to_le_bytes());
    assert!(parse_btf_ext_func_entries(&ext, &btf_bytes, &elf, &bases).is_empty());
}
#[test]
fn parse_btf_ext_bad_hdr_len_returns_empty() {
    // hdr_len (bytes 4..8) must be at least 24 and fit within the blob.
    let btf_bytes = build_btf_blob(&[], b"\0");
    let blob = build_elf64(vec![], h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bases = HashMap::new();
    // Case 1: hdr_len smaller than the fixed 24-byte header.
    let mut data = vec![0u8; 24];
    data[0..2].copy_from_slice(&0xEB9F_u16.to_le_bytes());
    data[4..8].copy_from_slice(&16u32.to_le_bytes());
    let out = parse_btf_ext_func_entries(&data, &btf_bytes, &elf, &bases);
    assert!(out.is_empty(), "hdr_len=16 should be rejected");
    // Case 2: hdr_len larger than the entire blob.
    let mut data = vec![0u8; 24];
    data[0..2].copy_from_slice(&0xEB9F_u16.to_le_bytes());
    data[4..8].copy_from_slice(&1024u32.to_le_bytes());
    let out = parse_btf_ext_func_entries(&data, &btf_bytes, &elf, &bases);
    assert!(out.is_empty(), "hdr_len > data.len should be rejected");
}
#[test]
fn parse_btf_ext_func_info_window_oob_returns_empty() {
    // A func_info_len extending far past the end of the blob must be rejected.
    let btf_bytes = build_btf_blob(&[], b"\0");
    let blob = build_elf64(vec![], h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bases = HashMap::new();
    let mut data = vec![0u8; 32];
    data[0..2].copy_from_slice(&0xEB9F_u16.to_le_bytes());
    // hdr_len=24, func_info_off=0, func_info_len=10_000 (blob is only 32 bytes).
    data[4..8].copy_from_slice(&24u32.to_le_bytes()); data[8..12].copy_from_slice(&0u32.to_le_bytes()); data[12..16].copy_from_slice(&10_000u32.to_le_bytes()); let out = parse_btf_ext_func_entries(&data, &btf_bytes, &elf, &bases);
    assert!(out.is_empty());
}
#[test]
fn parse_btf_ext_record_size_too_small_returns_empty() {
    // A func_info record_size of 4 is below the 8-byte minimum
    // (insn_off + type_id) and must be rejected.
    let btf_bytes = build_btf_blob(&[], b"\0");
    let blob = build_elf64(vec![], h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bases = HashMap::new();
    let mut data = vec![0u8; 32];
    data[0..2].copy_from_slice(&0xEB9F_u16.to_le_bytes());
    // hdr_len=24, func_info_off=0, func_info_len=8, record_size=4 (at offset 24).
    data[4..8].copy_from_slice(&24u32.to_le_bytes()); data[8..12].copy_from_slice(&0u32.to_le_bytes()); data[12..16].copy_from_slice(&8u32.to_le_bytes()); data[24..28].copy_from_slice(&4u32.to_le_bytes());
    let out = parse_btf_ext_func_entries(&data, &btf_bytes, &elf, &bases);
    assert!(out.is_empty());
}
#[test]
fn parse_btf_ext_non_multiple_insn_off_skips_entry() {
    // Two func_info records: insn_off 8 (a valid 8-byte instruction
    // boundary) and insn_off 12 (misaligned) — only the first survives.
    let bytes_strs = b"\0txt\0";
    let btf_bytes = build_btf_blob(&[], bytes_strs);
    let inner = build_elf64(
        vec![SecSpec::new("txt", sh::SHT_PROGBITS).flags(sh::SHF_EXECINSTR.into())],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    let mut bases: HashMap<u32, usize> = HashMap::new();
    // Section index 1 ("txt") starts at global instruction index 0.
    bases.insert(1, 0);
    let mut data = Vec::new();
    // .BTF.ext header: magic, version, flags, hdr_len=24,
    // func_info_off=0, func_info_len=28, line_info_off=28, line_info_len=0.
    data.extend_from_slice(&0xEB9F_u16.to_le_bytes());
    data.push(1);
    data.push(0);
    data.extend_from_slice(&24u32.to_le_bytes());
    data.extend_from_slice(&0u32.to_le_bytes());
    data.extend_from_slice(&28u32.to_le_bytes());
    data.extend_from_slice(&28u32.to_le_bytes());
    data.extend_from_slice(&0u32.to_le_bytes());
    // func_info block: record_size=8, section name off=1 ("txt"), num_info=2.
    data.extend_from_slice(&8u32.to_le_bytes());
    data.extend_from_slice(&1u32.to_le_bytes());
    data.extend_from_slice(&2u32.to_le_bytes());
    // Record 1: insn_off=8 (instruction index 1), id=42 — kept.
    data.extend_from_slice(&8u32.to_le_bytes());
    data.extend_from_slice(&42u32.to_le_bytes());
    // Record 2: insn_off=12 (not a multiple of 8), id=99 — skipped.
    data.extend_from_slice(&12u32.to_le_bytes());
    data.extend_from_slice(&99u32.to_le_bytes());
    let out = parse_btf_ext_func_entries(&data, &btf_bytes, &elf, &bases);
    assert_eq!(out.len(), 1, "got {out:?}");
    assert_eq!(out[0].insn_offset, 1);
    assert_eq!(out[0].func_proto_id, 42);
}
#[test]
fn iter_embedded_bpf_objects_no_symbols_falls_back_to_full_section() {
    // With no symbol table present, the whole `.bpf.objs` payload is
    // returned as a single fallback slice.
    let payload = b"DUMMY_BPF_OBJ_BYTES";
    let blob = build_elf64(
        vec![SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(payload.to_vec())],
        h::EM_X86_64,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let idx = find_section(&elf, ".bpf.objs").expect(".bpf.objs");
    let slices = iter_embedded_bpf_objects(&elf, &blob, idx);
    assert_eq!(slices.len(), 1, "expected one fallback slice");
    assert_eq!(slices[0].len(), payload.len());
    assert_eq!(slices[0], &payload[..]);
}
#[test]
fn section_data_overflow_returns_none() {
    // Corrupt the first real section header so that sh_offset and sh_size
    // are both u64::MAX; offset + size must not wrap into a bogus slice.
    let payload = b"PAYLOAD".to_vec();
    let mut blob = build_elf64(
        vec![SecSpec::new(".x", sh::SHT_PROGBITS).data(payload)],
        h::EM_X86_64,
        h::ET_REL,
    );
    let elf_view = goblin::elf::Elf::parse(&blob).unwrap();
    let shoff = elf_view.header.e_shoff as usize;
    // Skip the 64-byte SHT_NULL header to reach header #1 (".x").
    let shdr1_off = shoff + 64;
    // Elf64_Shdr: sh_offset lives at +24..+32, sh_size at +32..+40.
    blob[shdr1_off + 24..shdr1_off + 32].copy_from_slice(&u64::MAX.to_le_bytes());
    blob[shdr1_off + 32..shdr1_off + 40].copy_from_slice(&u64::MAX.to_le_bytes());
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let idx = find_section(&elf, ".x").expect(".x");
    assert!(section_data(&elf, &blob, idx).is_none());
}
#[test]
fn smoke_symtab_helpers_compile() {
    // Smoke test: an ELF carrying a symtab/strtab built with the local
    // helpers still parses. No assertions beyond a successful parse.
    let strtab = b"\0bpf_obj\0".to_vec();
    let mut symtab = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0)); // mandatory null symbol
    symtab.extend_from_slice(&elf64_sym(
        1, st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        1, 0, 8, // shndx = .bpf.objs, value = 0, size = 8
    ));
    let blob = build_elf64(
        vec![
            SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(vec![0u8; 8]),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2) .entsize(24), // sh_link -> .strtab; 24-byte entries
        ],
        h::EM_X86_64,
        h::ET_REL,
    );
    let _ = goblin::elf::Elf::parse(&blob).expect("parse");
}
#[test]
fn find_section_locates_named_section() {
    // Indices are 1-based in practice because index 0 is the SHT_NULL entry.
    let sections = vec![
        SecSpec::new(".text", sh::SHT_PROGBITS).flags(sh::SHF_EXECINSTR.into()),
        SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(vec![0u8; 4]),
    ];
    let blob = build_elf64(sections, h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    assert_eq!(find_section(&elf, ".text"), Some(1));
    assert_eq!(find_section(&elf, ".bpf.objs"), Some(2));
}
#[test]
fn find_section_missing_returns_none() {
    // Looking up a name that was never emitted must return None.
    let sections =
        vec![SecSpec::new(".text", sh::SHT_PROGBITS).flags(sh::SHF_EXECINSTR.into())];
    let blob = build_elf64(sections, h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    assert_eq!(find_section(&elf, ".nope"), None);
}
#[test]
fn section_data_returns_section_bytes() {
    // section_data must hand back exactly the payload written into ".x".
    let payload = b"section-bytes-payload-12345";
    let blob = build_elf64(
        vec![SecSpec::new(".x", sh::SHT_PROGBITS).data(payload.to_vec())],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let idx = find_section(&elf, ".x").unwrap();
    let bytes = section_data(&elf, &blob, idx).expect("payload slice");
    assert_eq!(bytes.len(), payload.len());
    assert_eq!(bytes, &payload[..]);
}
#[test]
fn section_data_out_of_range_returns_none() {
    // Index 9999 does not exist in a three-header file.
    let blob = build_elf64(
        vec![SecSpec::new(".text", sh::SHT_PROGBITS)],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    assert!(section_data(&elf, &blob, 9999).is_none());
}
#[test]
fn iter_embedded_bpf_objects_uses_object_symbol() {
    // A global OBJECT symbol (value=4, size=24) inside `.bpf.objs` selects
    // the sub-slice payload[4..28] instead of the whole section.
    let payload: Vec<u8> = (0..32u8).collect();
    let strtab = b"\0bpf_obj\0".to_vec();
    let mut symtab = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0)); // mandatory null symbol
    symtab.extend_from_slice(&elf64_sym(
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        1, 4, 24, // shndx = .bpf.objs, value = 4, size = 24
    ));
    let blob = build_elf64(
        vec![
            SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(payload),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2) // sh_link points at .strtab
                .entsize(24),
        ],
        h::EM_X86_64,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bpf_objs_idx = find_section(&elf, ".bpf.objs").unwrap();
    let out = iter_embedded_bpf_objects(&elf, &blob, bpf_objs_idx);
    assert_eq!(out.len(), 1);
    assert_eq!(out[0].len(), 24);
    // The payload is 0..32, so the symbol's window is bytes 4..28.
    let expected: Vec<u8> = (4..28u8).collect();
    assert_eq!(out[0], expected.as_slice());
}
#[test]
fn iter_embedded_bpf_objects_rejects_oversized_symbol() {
    // The OBJECT symbol claims 200 bytes but the section holds only 16, so
    // the symbol is ignored and the full-section fallback is used.
    let payload = b"0123456789abcdef".to_vec(); let payload_len = payload.len();
    let strtab = b"\0bpf_obj\0".to_vec();
    let mut symtab = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0)); // mandatory null symbol
    symtab.extend_from_slice(&elf64_sym(
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        1,
        0,
        200, // size exceeds the 16-byte section
    ));
    let blob = build_elf64(
        vec![
            SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(payload),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2) // sh_link -> .strtab
                .entsize(24),
        ],
        h::EM_X86_64,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bpf_objs_idx = find_section(&elf, ".bpf.objs").unwrap();
    let out = iter_embedded_bpf_objects(&elf, &blob, bpf_objs_idx);
    assert_eq!(out.len(), 1, "fallback yields exactly one slice");
    assert_eq!(out[0].len(), payload_len);
}
#[test]
fn iter_embedded_bpf_objects_skips_non_object_symbols() {
    // The only symbol is STT_FUNC (not STT_OBJECT), so it is skipped and
    // the full-section fallback slice is returned instead.
    let payload = b"hello-bpf-objects".to_vec();
    let payload_len = payload.len();
    let strtab = b"\0func_sym\0".to_vec();
    let mut symtab = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0)); // mandatory null symbol
    symtab.extend_from_slice(&elf64_sym(
        1,
        st_info(syms::STB_GLOBAL, syms::STT_FUNC), // wrong type on purpose
        1,
        0,
        8,
    ));
    let blob = build_elf64(
        vec![
            SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(payload),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2) // sh_link -> .strtab
                .entsize(24),
        ],
        h::EM_X86_64,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).unwrap();
    let bpf_objs_idx = find_section(&elf, ".bpf.objs").unwrap();
    let out = iter_embedded_bpf_objects(&elf, &blob, bpf_objs_idx);
    assert_eq!(out.len(), 1);
    assert_eq!(out[0].len(), payload_len);
}
/// Encode one BPF instruction in little-endian wire format:
/// byte 0 = opcode, byte 1 = src:dst register nibbles, bytes 2-3 = off,
/// bytes 4-7 = imm.
fn insn_to_bytes(i: BpfInsn) -> [u8; 8] {
    let mut buf = [0u8; 8];
    buf[0] = i.code;
    buf[1] = ((i.src_reg() & 0x0f) << 4) | (i.dst_reg() & 0x0f);
    buf[2..4].copy_from_slice(&i.off.to_le_bytes());
    buf[4..8].copy_from_slice(&i.imm.to_le_bytes());
    buf
}
/// Concatenate the 8-byte encodings of a slice of instructions into one
/// `.text`-style byte buffer.
fn insns_to_text_bytes(insns: &[BpfInsn]) -> Vec<u8> {
    insns
        .iter()
        .flat_map(|ins| insn_to_bytes(*ins))
        .collect()
}
// BPF opcodes: LDX|DW|MEM = 0x79 (load 8 bytes from memory into a register),
// JMP|EXIT = 0x95 (program exit).
const OP_LDX_DW_MEM: u8 = 0x01 | 0x18 | 0x60; const OP_JMP_EXIT: u8 = 0x05 | 0x90;
/// `dst = *(u64 *)(src + off)` — 8-byte memory load.
fn ldx_dw_mem(dst: u8, src: u8, off: i16) -> BpfInsn {
    BpfInsn::new(OP_LDX_DW_MEM, dst, src, off, 0)
}
/// BPF `exit` instruction (end of program).
fn exit_insn() -> BpfInsn {
    BpfInsn::new(OP_JMP_EXIT, 0, 0, 0, 0)
}
/// BPF_ALU64|BPF_MOV|BPF_X with off=1 is the addr_space_cast encoding;
/// imm=1 selects the address-space pair — presumably arena (as 1) to
/// kernel (as 0); confirm against the libbpf/kernel docs.
fn addr_space_cast_insn(dst: u8, src: u8) -> BpfInsn {
    use libbpf_rs::libbpf_sys as bs;
    let code = (bs::BPF_ALU64 | bs::BPF_MOV | bs::BPF_X) as u8;
    BpfInsn::new(code, dst, src, 1, 1)
}
// Subset of BTF kind ids (matching linux/btf.h numbering) used by the
// synthetic type builder below.
const SYN_BTF_KIND_INT: u32 = 1;
const SYN_BTF_KIND_PTR: u32 = 2;
const SYN_BTF_KIND_STRUCT: u32 = 4;
const SYN_BTF_KIND_FWD: u32 = 7;
const SYN_BTF_KIND_FUNC: u32 = 12;
const SYN_BTF_KIND_FUNC_PROTO: u32 = 13;
/// Append `name` NUL-terminated to a BTF string table and return the
/// offset at which it starts.
fn push_btf_name(s: &mut Vec<u8>, name: &str) -> u32 {
    let start = s.len() as u32;
    s.extend(name.bytes());
    s.push(0);
    start
}
/// One struct member for the synthetic BTF builder.
#[derive(Clone, Copy)]
struct SynMember {
    name_off: u32, // offset into the string section
    type_id: u32,
    byte_offset: u32, // converted to a bit offset at encode time
}
/// One function-prototype parameter for the synthetic BTF builder.
#[derive(Clone, Copy)]
struct SynParam {
    name_off: u32, // offset into the string section (0 = anonymous)
    type_id: u32,
}
/// Synthetic BTF type descriptors consumed by `build_btf_full`.
/// Type ids are 1-based in declaration order.
enum SynKind {
    /// BTF_KIND_INT: named integer; encoding/offset/bits are packed into
    /// the trailing u32 data word.
    Int {
        name_off: u32,
        size: u32,
        encoding: u32,
        offset: u32,
        bits: u32,
    },
    /// BTF_KIND_PTR: anonymous pointer to `type_id`.
    Ptr {
        type_id: u32,
    },
    /// BTF_KIND_STRUCT with a flat member list.
    Struct {
        name_off: u32,
        size: u32,
        members: Vec<SynMember>,
    },
    /// BTF_KIND_FWD forward declaration; `kind_flag` occupies info bit 31.
    Fwd {
        name_off: u32,
        kind_flag: u32,
    },
    /// BTF_KIND_FUNC pointing at its prototype via `type_id`.
    Func {
        name_off: u32,
        type_id: u32,
        linkage: u32,
    },
    /// BTF_KIND_FUNC_PROTO with return type and parameter list.
    FuncProto {
        return_type_id: u32,
        params: Vec<SynParam>,
    },
}
/// Serialize a list of synthetic BTF types plus a string table into a
/// complete BTF blob: 24-byte header, type section, then string section.
/// Each type emits the common header (name_off, info word, size/type word)
/// followed by its kind-specific payload; the info word carries the kind
/// in bits 24..29 and vlen/flags in the low bits.
fn build_btf_full(types: &[SynKind], strings: &[u8]) -> Vec<u8> {
    let mut type_section = Vec::new();
    for ty in types {
        match ty {
            SynKind::Int { name_off, size, encoding, offset, bits } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                let info = (SYN_BTF_KIND_INT << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                // BTF int data word: encoding << 24 | offset << 16 | bits.
                let int_data = (*encoding << 24) | ((*offset & 0xff) << 16) | (*bits & 0xff);
                type_section.extend_from_slice(&int_data.to_le_bytes());
            }
            SynKind::Ptr { type_id } => {
                // Pointers are anonymous: name_off = 0.
                type_section.extend_from_slice(&0u32.to_le_bytes());
                let info = (SYN_BTF_KIND_PTR << 24) & 0x1f00_0000;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            SynKind::Struct { name_off, size, members } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                // vlen (member count) lives in the low 16 bits of info.
                let vlen = members.len() as u32;
                let info = ((SYN_BTF_KIND_STRUCT << 24) & 0x1f00_0000) | (vlen & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                for m in members {
                    type_section.extend_from_slice(&m.name_off.to_le_bytes());
                    type_section.extend_from_slice(&m.type_id.to_le_bytes());
                    // Member offsets are stored in bits, not bytes.
                    let bit_off = m.byte_offset * 8;
                    type_section.extend_from_slice(&bit_off.to_le_bytes());
                }
            }
            SynKind::Fwd { name_off, kind_flag } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                // kind_flag (struct vs union fwd) occupies info bit 31.
                let info = ((SYN_BTF_KIND_FWD << 24) & 0x1f00_0000) | ((*kind_flag & 1) << 31);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&0u32.to_le_bytes());
            }
            SynKind::Func { name_off, type_id, linkage } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                // Linkage is carried in the vlen field for BTF_KIND_FUNC.
                let info = ((SYN_BTF_KIND_FUNC << 24) & 0x1f00_0000) | (*linkage & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            SynKind::FuncProto { return_type_id, params } => {
                // Prototypes are anonymous: name_off = 0.
                type_section.extend_from_slice(&0u32.to_le_bytes());
                let vlen = params.len() as u32;
                let info = ((SYN_BTF_KIND_FUNC_PROTO << 24) & 0x1f00_0000) | (vlen & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&return_type_id.to_le_bytes());
                for p in params {
                    type_section.extend_from_slice(&p.name_off.to_le_bytes());
                    type_section.extend_from_slice(&p.type_id.to_le_bytes());
                }
            }
        }
    }
    let type_len = type_section.len() as u32;
    let str_len = strings.len() as u32;
    let mut blob = Vec::new();
    // BTF header: magic, version, flags, hdr_len=24, type_off=0,
    // type_len, str_off (= type_len, strings follow types), str_len.
    blob.write_all(&0xEB9F_u16.to_le_bytes()).unwrap();
    blob.push(1); blob.push(0); blob.write_all(&24u32.to_le_bytes()).unwrap(); blob.write_all(&0u32.to_le_bytes()).unwrap(); blob.write_all(&type_len.to_le_bytes()).unwrap(); blob.write_all(&type_len.to_le_bytes()).unwrap(); blob.write_all(&str_len.to_le_bytes()).unwrap();
    blob.extend_from_slice(&type_section);
    blob.extend_from_slice(strings);
    blob
}
/// Build a minimal `.BTF.ext` blob containing only a func_info block for a
/// single section. Each record is `(insn_byte_off, type_id)` padded with
/// zeros up to `record_size`.
///
/// Fix: `section_name_off` was previously written as `§ion_name_off` —
/// HTML-entity mojibake of `&section_name_off` — which does not compile.
///
/// NOTE: `info_len` assumes `record_size >= 8`; smaller sizes still emit
/// 8 bytes per record (matching the original behavior, used only by tests
/// that expect rejection).
fn build_btf_ext(section_name_off: u32, records: &[(u32, u32)], record_size: u32) -> Vec<u8> {
    let header_len = 24u32;
    // func_info payload: record_size word + one section header
    // (sec_name_off, num_info) + the records themselves.
    let info_len = 4 + 4 + 4 + records.len() as u32 * record_size;
    let mut info = Vec::new();
    info.extend_from_slice(&record_size.to_le_bytes());
    info.extend_from_slice(&section_name_off.to_le_bytes());
    info.extend_from_slice(&(records.len() as u32).to_le_bytes());
    // Zero padding per record beyond the mandatory 8 bytes.
    let pad = record_size.saturating_sub(8) as usize;
    for (insn_off, type_id) in records {
        info.extend_from_slice(&insn_off.to_le_bytes());
        info.extend_from_slice(&type_id.to_le_bytes());
        info.resize(info.len() + pad, 0);
    }
    let mut out = Vec::new();
    out.extend_from_slice(&0xEB9F_u16.to_le_bytes()); // magic
    out.push(1); // version
    out.push(0); // flags
    out.extend_from_slice(&header_len.to_le_bytes());
    out.extend_from_slice(&0u32.to_le_bytes()); // func_info_off
    out.extend_from_slice(&info_len.to_le_bytes()); // func_info_len
    out.extend_from_slice(&info_len.to_le_bytes()); // line_info_off
    out.extend_from_slice(&0u32.to_le_bytes()); // line_info_len (empty)
    out.extend_from_slice(&info);
    out
}
/// Assemble a minimal BPF object ELF: executable `.text` plus the supplied
/// `.BTF` and `.BTF.ext` blobs.
fn build_full_bpf_object_elf(text: Vec<u8>, btf: Vec<u8>, btf_ext: Vec<u8>) -> Vec<u8> {
    let sections = vec![
        SecSpec::new(".text", sh::SHT_PROGBITS)
            .flags(sh::SHF_EXECINSTR.into())
            .data(text),
        SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf),
        SecSpec::new(".BTF.ext", sh::SHT_PROGBITS).data(btf_ext),
    ];
    build_elf64(sections, h::EM_BPF, h::ET_REL)
}
#[test]
fn analyze_one_object_corrupt_elf_returns_empty() {
    // 64 zero bytes is not a parseable ELF; analysis must yield nothing.
    let garbage = vec![0u8; 64];
    let (map, btf, _alloc_sizes) = analyze_one_object_with_btf(&garbage);
    assert!(map.is_empty());
    assert!(btf.is_none());
}
#[test]
fn analyze_one_object_no_btf_returns_empty() {
    // Without a .BTF section there is nothing to analyze.
    let text = SecSpec::new(".text", sh::SHT_PROGBITS)
        .flags(sh::SHF_EXECINSTR.into())
        .data(vec![0u8; 8]);
    let bytes = build_elf64(vec![text], h::EM_BPF, h::ET_REL);
    let (map, btf, _alloc_sizes) = analyze_one_object_with_btf(&bytes);
    assert!(map.is_empty());
    assert!(btf.is_none());
}
#[test]
fn analyze_one_object_corrupt_btf_returns_empty() {
    // A present-but-garbage .BTF section must produce neither a map nor
    // parsed BTF.
    let sections = vec![
        SecSpec::new(".text", sh::SHT_PROGBITS)
            .flags(sh::SHF_EXECINSTR.into())
            .data(insns_to_text_bytes(&[exit_insn()])),
        SecSpec::new(".BTF", sh::SHT_PROGBITS).data(vec![0xFFu8; 32]),
    ];
    let bytes = build_elf64(sections, h::EM_BPF, h::ET_REL);
    let (map, btf, _alloc_sizes) = analyze_one_object_with_btf(&bytes);
    assert!(map.is_empty());
    assert!(btf.is_none());
}
#[test]
fn analyze_one_object_no_text_section_returns_empty() {
    // Valid BTF but no executable section: BTF parses, but the map is empty.
    let btf_sec = SecSpec::new(".BTF", sh::SHT_PROGBITS).data(build_btf_blob(&[], b"\0"));
    let bytes = build_elf64(vec![btf_sec], h::EM_BPF, h::ET_REL);
    let (map, btf, _alloc_sizes) = analyze_one_object_with_btf(&bytes);
    assert!(map.is_empty());
    assert!(btf.is_some());
}
#[test]
fn analyze_one_object_misaligned_text_skipped() {
    // A 7-byte .text is not a multiple of the 8-byte BPF instruction size,
    // so no casts can be recovered; BTF itself still parses.
    let sections = vec![
        SecSpec::new(".text", sh::SHT_PROGBITS)
            .flags(sh::SHF_EXECINSTR.into())
            .data(vec![0u8; 7]),
        SecSpec::new(".BTF", sh::SHT_PROGBITS).data(build_btf_blob(&[], b"\0")),
    ];
    let bytes = build_elf64(sections, h::EM_BPF, h::ET_REL);
    let (map, btf, _alloc_sizes) = analyze_one_object_with_btf(&bytes);
    assert!(map.is_empty());
    assert!(btf.is_some());
}
#[test]
fn analyze_one_object_recovers_arena_cast_end_to_end() {
let mut strings = vec![0u8];
let n_int = push_btf_name(&mut strings, "u64");
let n_t = push_btf_name(&mut strings, "T");
let n_q = push_btf_name(&mut strings, "Q");
let n_f = push_btf_name(&mut strings, "f");
let n_x = push_btf_name(&mut strings, "x");
let n_func = push_btf_name(&mut strings, "myfunc");
let n_text = push_btf_name(&mut strings, ".text");
let types = vec![
SynKind::Int {
name_off: n_int,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynKind::Struct {
name_off: n_t,
size: 16,
members: vec![SynMember {
name_off: n_f,
type_id: 1,
byte_offset: 8,
}],
},
SynKind::Struct {
name_off: n_q,
size: 8,
members: vec![SynMember {
name_off: n_x,
type_id: 1,
byte_offset: 0,
}],
},
SynKind::Ptr { type_id: 2 },
SynKind::FuncProto {
return_type_id: 0,
params: vec![SynParam {
name_off: 0,
type_id: 4,
}],
},
SynKind::Func {
name_off: n_func,
type_id: 5,
linkage: 1,
},
];
let btf_blob = build_btf_full(&types, &strings);
let insns = vec![
ldx_dw_mem(2, 1, 8),
addr_space_cast_insn(2, 2),
ldx_dw_mem(3, 2, 0),
exit_insn(),
];
let text = insns_to_text_bytes(&insns);
let btf_ext = build_btf_ext(n_text, &[(0, 5)], 8);
let bytes = build_full_bpf_object_elf(text, btf_blob, btf_ext);
let (map, btf, _alloc_sizes) = analyze_one_object_with_btf(&bytes);
assert!(btf.is_some(), "valid BTF must be returned");
let hit = map.get(&(2u32, 8u32)).copied();
assert_eq!(
hit,
Some(CastHit {
alloc_size: None,
target_type_id: 3,
addr_space: AddrSpace::Arena,
}),
"expected arena cast T.f → Q*, got {map:?}"
);
}
#[test]
fn cached_cast_analysis_corrupt_inner_returns_none() {
    // `.bpf.objs` is present, but its payload is not an ELF object.
    let outer = build_elf64(
        vec![SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(b"not-an-elf".to_vec())],
        h::EM_X86_64,
        h::ET_REL,
    );
    let tmp = tempfile::tempdir().expect("tempdir");
    let path = tmp.path().join("bad_inner.bin");
    std::fs::write(&path, &outer).expect("write");
    assert!(cached_cast_analysis_for_scheduler(&path).is_none());
}
#[test]
fn cached_cast_analysis_inner_without_btf_returns_none() {
    // The embedded object is a valid ELF but carries no BTF, so the
    // analysis collapses to None.
    let inner_text = SecSpec::new(".text", sh::SHT_PROGBITS)
        .flags(sh::SHF_EXECINSTR.into())
        .data(vec![0u8; 8]);
    let inner = build_elf64(vec![inner_text], h::EM_BPF, h::ET_REL);
    let outer = build_elf64(
        vec![SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(inner)],
        h::EM_X86_64,
        h::ET_REL,
    );
    let tmp = tempfile::tempdir().expect("tempdir");
    let path = tmp.path().join("no_inner_btf.bin");
    std::fs::write(&path, &outer).expect("write");
    assert!(cached_cast_analysis_for_scheduler(&path).is_none());
}
#[test]
fn cached_cast_analysis_recovers_arena_cast_end_to_end() {
    // The fixture previously duplicated ~65 lines of BTF/instruction setup
    // byte-for-byte; it is exactly what `build_recovers_arena_cast_outer_elf`
    // produces, so use the shared helper instead: an outer x86-64 ELF whose
    // `.bpf.objs` payload seeds an arena addr_space_cast through T.f
    // (reg 2, offset 8) targeting struct Q (type id 3).
    let outer = build_recovers_arena_cast_outer_elf();
    let dir = tempfile::tempdir().expect("tempdir");
    let p = dir.path().join("full.bin");
    std::fs::write(&p, &outer).expect("write");
    let out = cached_cast_analysis_for_scheduler(&p).expect("non-empty fixture must produce Some");
    let hit = out.cast_maps[0].get(&(2u32, 8u32)).copied();
    assert_eq!(
        hit,
        Some(CastHit {
            alloc_size: None,
            target_type_id: 3,
            addr_space: AddrSpace::Arena,
        }),
        "expected arena cast T.f → Q*, got {:?}",
        out.cast_maps[0]
    );
}
/// Build the shared end-to-end fixture: an outer x86-64 relocatable ELF
/// whose `.bpf.objs` section embeds a complete BPF object (text + BTF +
/// BTF.ext) that seeds one arena addr_space_cast through `T.f`
/// (register 2, offset 8) targeting struct Q (type id 3).
fn build_recovers_arena_cast_outer_elf() -> Vec<u8> {
    let mut strings = vec![0u8];
    let n_int = push_btf_name(&mut strings, "u64");
    let n_t = push_btf_name(&mut strings, "T");
    let n_q = push_btf_name(&mut strings, "Q");
    let n_f = push_btf_name(&mut strings, "f");
    let n_x = push_btf_name(&mut strings, "x");
    let n_func = push_btf_name(&mut strings, "myfunc");
    let n_text = push_btf_name(&mut strings, ".text");
    // Type ids are 1-based in declaration order:
    // 1=u64, 2=struct T, 3=struct Q, 4=ptr, 5=func proto, 6=func.
    let types = vec![
        SynKind::Int { name_off: n_int, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Struct {
            name_off: n_t,
            size: 16,
            members: vec![SynMember { name_off: n_f, type_id: 1, byte_offset: 8 }],
        },
        SynKind::Struct {
            name_off: n_q,
            size: 8,
            members: vec![SynMember { name_off: n_x, type_id: 1, byte_offset: 0 }],
        },
        SynKind::Ptr { type_id: 2 },
        SynKind::FuncProto {
            return_type_id: 0,
            params: vec![SynParam { name_off: 0, type_id: 4 }],
        },
        SynKind::Func { name_off: n_func, type_id: 5, linkage: 1 },
    ];
    let btf_blob = build_btf_full(&types, &strings);
    let insns = vec![
        ldx_dw_mem(2, 1, 8),        // r2 = *(u64 *)(r1 + 8): load T.f
        addr_space_cast_insn(2, 2), // r2 = addr_space_cast(r2)
        ldx_dw_mem(3, 2, 0),        // r3 = *(u64 *)(r2 + 0): deref cast ptr
        exit_insn(),
    ];
    let text = insns_to_text_bytes(&insns);
    // func_info: instruction 0 of ".text" is associated with type id 5.
    let btf_ext = build_btf_ext(n_text, &[(0, 5)], 8);
    let inner = build_full_bpf_object_elf(text, btf_blob, btf_ext);
    build_elf64(
        vec![SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(inner)],
        h::EM_X86_64,
        h::ET_REL,
    )
}
#[test]
fn cached_cast_analysis_returns_same_arc_for_same_content() {
    // Two distinct paths with identical bytes must hit the same cache
    // entry, observable as pointer-equal Arcs.
    let blob = build_recovers_arena_cast_outer_elf();
    let dir = tempfile::tempdir().expect("tempdir");
    let p1 = dir.path().join("first.bin");
    let p2 = dir.path().join("second.bin");
    std::fs::write(&p1, &blob).expect("write 1");
    std::fs::write(&p2, &blob).expect("write 2");
    let first = cached_cast_analysis_for_scheduler(&p1).expect("Some on non-empty analysis");
    let second = cached_cast_analysis_for_scheduler(&p2).expect("cache hit on identical content");
    assert!(
        Arc::ptr_eq(&first, &second),
        "expected pointer-equal Arc when two paths have identical content"
    );
    // The shared cached result still contains the seeded arena cast.
    assert_eq!(
        first.cast_maps[0].get(&(2u32, 8u32)).copied(),
        Some(CastHit {
            alloc_size: None,
            target_type_id: 3,
            addr_space: AddrSpace::Arena,
        }),
    );
}
#[test]
fn cached_cast_analysis_collapses_empty_to_none() {
    // An analysis with no findings resolves to None, and repeated lookups
    // of the same content agree.
    let blob = build_elf64(
        vec![SecSpec::new(".text", sh::SHT_PROGBITS).flags(sh::SHF_EXECINSTR.into())],
        h::EM_X86_64,
        h::ET_REL,
    );
    let tmp = tempfile::tempdir().expect("tempdir");
    let path = tmp.path().join("empty.bin");
    std::fs::write(&path, &blob).expect("write");
    for _ in 0..2 {
        assert!(cached_cast_analysis_for_scheduler(&path).is_none());
    }
}
#[test]
fn cached_cast_analysis_read_failure_does_not_pollute_cache() {
    // A failed read (missing file) must not be cached as a permanent None:
    // once the file appears, analysis succeeds.
    let dir = tempfile::tempdir().expect("tempdir");
    let p = dir.path().join("appears_later.bin");
    assert!(!p.exists());
    assert!(cached_cast_analysis_for_scheduler(&p).is_none());
    let blob = build_recovers_arena_cast_outer_elf();
    std::fs::write(&p, &blob).expect("write");
    let out = cached_cast_analysis_for_scheduler(&p)
        .expect("post-creation read should succeed and produce a non-empty CastAnalysisOutput");
    assert_eq!(
        out.cast_maps[0].get(&(2u32, 8u32)).copied(),
        Some(CastHit {
            alloc_size: None,
            target_type_id: 3,
            addr_space: AddrSpace::Arena,
        }),
        "post-creation analysis should recover the seeded cast"
    );
}
#[test]
fn lazy_cast_map_get_full_is_idempotent_and_lazy() {
    // Construction must not trigger analysis (the inner cell stays unset),
    // and repeated get_full() calls return the same cached Arc.
    let blob = build_recovers_arena_cast_outer_elf();
    let dir = tempfile::tempdir().expect("tempdir");
    let p = dir.path().join("lazy.bin");
    std::fs::write(&p, &blob).expect("write");
    let lazy = LazyCastMap::new(Some(p.clone()));
    assert!(
        lazy.inner.get().is_none(),
        "LazyCastMap::new must not run analysis"
    );
    let first = lazy.get_full().expect("non-empty result");
    let second = lazy.get_full().expect("non-empty result");
    assert!(
        Arc::ptr_eq(&first, &second),
        "OnceLock-backed `.get_full()` must return the same Arc on every call"
    );
}
/// A binary with no analyzable content must collapse to `None` through the
/// lazy wrapper, same as the eager cached path does.
#[test]
fn lazy_cast_map_get_full_returns_none_for_no_findings() {
    // Executable-only ELF: nothing for the cast analysis to find.
    let empty_blob = build_elf64(
        vec![SecSpec::new(".text", sh::SHT_PROGBITS).flags(sh::SHF_EXECINSTR.into())],
        h::EM_X86_64,
        h::ET_REL,
    );
    let dir = tempfile::tempdir().expect("tempdir");
    let p = dir.path().join("no_findings.bin");
    std::fs::write(&p, &empty_blob).expect("write");
    let lazy = LazyCastMap::new(Some(p));
    assert!(
        lazy.get_full().is_none(),
        "no-`.bpf.objs` binary must collapse to None on `.get_full()`"
    );
}
/// Two func_info records in a `.BTF.ext` blob produce two func entries.
/// The assertions show byte offsets are converted to instruction indices
/// (byte offset 16 -> insn_offset 2, i.e. divided by the 8-byte insn size).
#[test]
fn parse_btf_ext_records_produce_func_entries() {
    // BTF string table with just ".text" so the section name resolves.
    let mut strings = vec![0u8];
    let n_text = push_btf_name(&mut strings, ".text");
    let btf_blob = build_btf_full(&[], &strings);
    let inner = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(vec![0u8; 32]),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    let text_idx = find_section(&elf, ".text").expect(".text") as u32;
    let mut bases: HashMap<u32, usize> = HashMap::new();
    bases.insert(text_idx, 0);
    // Records: (byte offset in section, func proto/type id); record size 8.
    let data = build_btf_ext(n_text, &[(0, 11), (16, 22)], 8);
    let out = parse_btf_ext_func_entries(&data, &btf_blob, &elf, &bases);
    assert_eq!(out.len(), 2, "got {out:?}");
    assert_eq!(out[0].insn_offset, 0);
    assert_eq!(out[0].func_proto_id, 11);
    assert_eq!(out[1].insn_offset, 2);
    assert_eq!(out[1].func_proto_id, 22);
}
/// A non-zero section base must be added to each record's instruction
/// index: byte offset 16 (insn 2) plus base 10 yields insn_offset 12.
#[test]
fn parse_btf_ext_applies_section_base_offset() {
    let mut strings = vec![0u8];
    let n_text = push_btf_name(&mut strings, ".text");
    let btf_blob = build_btf_full(&[], &strings);
    let inner = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(vec![0u8; 32]),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    let text_idx = find_section(&elf, ".text").expect(".text") as u32;
    let mut bases: HashMap<u32, usize> = HashMap::new();
    // The base is in instruction units, not bytes.
    bases.insert(text_idx, 10);
    let data = build_btf_ext(n_text, &[(16, 5)], 8);
    let out = parse_btf_ext_func_entries(&data, &btf_blob, &elf, &bases);
    assert_eq!(out.len(), 1);
    assert_eq!(out[0].insn_offset, 12);
    assert_eq!(out[0].func_proto_id, 5);
}
/// `.BTF.ext` records may be larger than the two fields we read (here a
/// 16-byte record size instead of the minimal 8); the parser must step by
/// the declared record size and still decode both entries correctly.
#[test]
fn parse_btf_ext_handles_padded_records() {
    let mut strings = vec![0u8];
    let n_text = push_btf_name(&mut strings, ".text");
    let btf_blob = build_btf_full(&[], &strings);
    let inner = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(vec![0u8; 32]),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    let text_idx = find_section(&elf, ".text").expect(".text") as u32;
    let mut bases: HashMap<u32, usize> = HashMap::new();
    bases.insert(text_idx, 0);
    // Same records as the happy path but with rec_size = 16 (8 bytes pad).
    let data = build_btf_ext(n_text, &[(0, 11), (8, 22)], 16);
    let out = parse_btf_ext_func_entries(&data, &btf_blob, &elf, &bases);
    assert_eq!(out.len(), 2);
    assert_eq!(out[0].insn_offset, 0);
    assert_eq!(out[0].func_proto_id, 11);
    assert_eq!(out[1].insn_offset, 1);
    assert_eq!(out[1].func_proto_id, 22);
}
/// A func_info block whose `sec_name_off` (999) does not resolve in the
/// BTF string table must be skipped entirely, yielding no entries.
#[test]
fn parse_btf_ext_skips_unresolvable_section_name() {
    // String table contains only the mandatory leading NUL — offset 999 is bogus.
    let strings = vec![0u8];
    let btf_blob = build_btf_full(&[], &strings);
    let inner = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(vec![0u8; 32]),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    let bases: HashMap<u32, usize> = HashMap::new();
    let data = build_btf_ext(999, &[(0, 7)], 8);
    let out = parse_btf_ext_func_entries(&data, &btf_blob, &elf, &bases);
    assert!(out.is_empty());
}
/// The section name resolves in BTF strings but no ELF section with that
/// name exists (".not_in_elf"), so the block must be skipped.
#[test]
fn parse_btf_ext_skips_section_not_in_elf() {
    let mut strings = vec![0u8];
    let n_other = push_btf_name(&mut strings, ".not_in_elf");
    let btf_blob = build_btf_full(&[], &strings);
    let inner = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(vec![0u8; 32]),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    let bases: HashMap<u32, usize> = HashMap::new();
    let data = build_btf_ext(n_other, &[(0, 7)], 8);
    let out = parse_btf_ext_func_entries(&data, &btf_blob, &elf, &bases);
    assert!(out.is_empty());
}
/// The section exists in the ELF and resolves in BTF strings, but it has no
/// entry in the `bases` map — records for it must be dropped, not assumed
/// to start at 0.
#[test]
fn parse_btf_ext_skips_section_without_base() {
    let mut strings = vec![0u8];
    let n_text = push_btf_name(&mut strings, ".text");
    let btf_blob = build_btf_full(&[], &strings);
    let inner = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(vec![0u8; 32]),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    // Deliberately empty: .text has no registered base.
    let bases: HashMap<u32, usize> = HashMap::new();
    let data = build_btf_ext(n_text, &[(0, 7)], 8);
    let out = parse_btf_ext_func_entries(&data, &btf_blob, &elf, &bases);
    assert!(out.is_empty());
}
/// A structurally valid `.BTF.ext` header whose func_info length fields are
/// all zero must parse to an empty entry list (no panic, no bogus records).
#[test]
fn parse_btf_ext_zero_func_info_len_returns_empty() {
    let btf_blob = build_btf_full(&[], b"\0");
    let inner = build_elf64(vec![], h::EM_BPF, h::ET_REL);
    let elf = goblin::elf::Elf::parse(&inner).unwrap();
    let bases = HashMap::new();
    // 24 zero bytes, then patch in just the magic and hdr_len; the
    // func_info off/len fields stay zero.
    let mut data = vec![0u8; 24];
    data[0..2].copy_from_slice(&0xEB9F_u16.to_le_bytes());
    data[4..8].copy_from_slice(&24u32.to_le_bytes());
    let out = parse_btf_ext_func_entries(&data, &btf_blob, &elf, &bases);
    assert!(out.is_empty());
}
/// Serialize a 12-byte little-endian BTF type header (`struct btf_type`).
///
/// The `info` word packs the 5-bit kind into bits 24..29 and the 16-bit
/// vlen into the low half-word; out-of-range bits of either input are
/// masked off, matching the kernel's BTF_INFO encoding.
fn kfunc_btf_type_header(name_off: u32, kind: u32, vlen: u32, size_or_type: u32) -> [u8; 12] {
    let info = ((kind & 0x1f) << 24) | (vlen & 0xffff);
    let mut hdr = [0u8; 12];
    for (slot, word) in hdr.chunks_exact_mut(4).zip([name_off, info, size_or_type]) {
        slot.copy_from_slice(&word.to_le_bytes());
    }
    hdr
}
/// Build a minimal BTF blob declaring one extern kfunc named `kf_name`.
///
/// Type ids (BTF ids are 1-based): 1 = u64 INT, 2 = struct T { x: u64 },
/// 3 = PTR -> T, 4 = FUNC_PROTO returning type 3, 5 = FUNC(kf_name) with
/// extern linkage pointing at the proto.
///
/// Returns `(blob, func_id, struct_id)` = (bytes, 5, 2).
fn build_kfunc_btf_blob(kf_name: &str) -> (Vec<u8>, u32, u32) {
    // String table: leading NUL, then each name; offsets are into this table.
    let mut strings: Vec<u8> = vec![0];
    let push_name = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_u64 = push_name(&mut strings, "u64");
    let n_t = push_name(&mut strings, "T");
    let n_x = push_name(&mut strings, "x");
    let n_func = push_name(&mut strings, kf_name);
    let mut types: Vec<u8> = Vec::new();
    const BTF_KIND_INT: u32 = 1;
    const BTF_KIND_PTR: u32 = 2;
    const BTF_KIND_STRUCT: u32 = 4;
    const BTF_KIND_FUNC: u32 = 12;
    const BTF_KIND_FUNC_PROTO: u32 = 13;
    // For KIND_FUNC the vlen field carries the linkage; 2 = extern.
    const BTF_FUNC_EXTERN: u32 = 2;
    // id 1: u64, 8 bytes; INT kinds carry one extra u32 of encoding data.
    types.extend_from_slice(&kfunc_btf_type_header(n_u64, BTF_KIND_INT, 0, 8));
    let int_data: u32 = 64;
    types.extend_from_slice(&int_data.to_le_bytes());
    // id 2: struct T with 1 member (x: type 1 at bit offset 0), size 8.
    types.extend_from_slice(&kfunc_btf_type_header(n_t, BTF_KIND_STRUCT, 1, 8));
    types.extend_from_slice(&n_x.to_le_bytes());
    types.extend_from_slice(&1u32.to_le_bytes());
    types.extend_from_slice(&0u32.to_le_bytes());
    // id 3: pointer to struct T (type 2).
    types.extend_from_slice(&kfunc_btf_type_header(0, BTF_KIND_PTR, 0, 2));
    // id 4: func proto, no params, return type 3.
    types.extend_from_slice(&kfunc_btf_type_header(0, BTF_KIND_FUNC_PROTO, 0, 3));
    // id 5: extern FUNC named kf_name, proto = type 4.
    types.extend_from_slice(&kfunc_btf_type_header(
        n_func,
        BTF_KIND_FUNC,
        BTF_FUNC_EXTERN,
        4,
    ));
    // BTF header: magic, version, flags, hdr_len=24, type_off=0,
    // type_len, str_off (= type_len since strings follow types), str_len.
    let mut blob: Vec<u8> = Vec::new();
    blob.extend_from_slice(&0xEB9F_u16.to_le_bytes());
    blob.push(1);
    blob.push(0);
    blob.extend_from_slice(&24u32.to_le_bytes());
    blob.extend_from_slice(&0u32.to_le_bytes());
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes());
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes());
    blob.extend_from_slice(&(strings.len() as u32).to_le_bytes());
    blob.extend_from_slice(&types);
    blob.extend_from_slice(&strings);
    (blob, 5, 2)
}
/// Serialize an `Elf64_Rel` record (16 bytes, little-endian).
///
/// Per the ELF-64 format, `r_info` stores the symbol table index in the
/// high 32 bits and the relocation type in the low 32 bits.
fn elf64_rel(r_offset: u64, sym_idx: u64, r_type: u32) -> [u8; 16] {
    let r_info = (sym_idx << 32) | u64::from(r_type);
    let mut rec = [0u8; 16];
    let (off_half, info_half) = rec.split_at_mut(8);
    off_half.copy_from_slice(&r_offset.to_le_bytes());
    info_half.copy_from_slice(&r_info.to_le_bytes());
    rec
}
/// A `call -1` BPF instruction with src_reg = 1 (BPF_PSEUDO_CALL): the
/// pre-relocation shape of an unresolved kfunc call (imm is the -1
/// placeholder that relocation later patches).
fn pre_reloc_kfunc_call_bytes() -> [u8; 8] {
    let mut insn = [0u8; 8];
    insn[0] = 0x85; // BPF_JMP | BPF_CALL
    insn[1] = 0x10; // src_reg = 1 in the high nibble of the regs byte
    insn[4..8].copy_from_slice(&(-1i32).to_le_bytes());
    insn
}
/// A BPF `exit` instruction: opcode 0x95, every other field zero.
fn kfunc_exit_bytes() -> [u8; 8] {
    let mut insn = [0u8; 8];
    insn[0] = 0x95;
    insn
}
/// Happy path for `patch_kfunc_calls`: a `call -1` (src_reg =
/// BPF_PSEUDO_CALL), relocated against a GLOBAL/NOTYPE extern symbol whose
/// name resolves to a BTF FUNC, must get src_reg rewritten to
/// BPF_PSEUDO_KFUNC_CALL and imm patched to the BTF func id.
#[test]
fn patch_kfunc_calls_happy_path_rewrites_call_site() {
    let kf_name = "bpf_task_acquire";
    let (btf_blob, expected_func_id, _t_id) = build_kfunc_btf_blob(kf_name);
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // .strtab: mandatory leading NUL, then the kfunc name.
    let mut strtab: Vec<u8> = vec![0];
    let kf_str_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    // .symtab: null symbol + one extern-shaped symbol (GLOBAL, NOTYPE, shndx 0).
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        kf_str_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_kfunc_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    // One relocation at byte 0 of .text against symbol #1.
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices (null section is 0): 1=.text 2=.strtab 3=.symtab
    // 4=.rel.text 5=.BTF; hence .symtab.link(2) and .rel.text.link(3)/.info(1).
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_kfunc_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    // Sanity: the pre-relocation shape really is `call -1` / BPF_PSEUDO_CALL.
    assert_eq!(text_concat[0].code, 0x85);
    assert_eq!(text_concat[0].src_reg(), BPF_PSEUDO_CALL);
    assert_eq!(text_concat[0].imm, -1);
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    assert_eq!(text_concat[0].code, 0x85);
    assert_eq!(
        text_concat[0].src_reg(),
        BPF_PSEUDO_KFUNC_CALL,
        "src_reg now BPF_PSEUDO_KFUNC_CALL"
    );
    assert_eq!(
        text_concat[0].imm, expected_func_id as i32,
        "imm patched to BTF Func id"
    );
    // The trailing exit instruction must be untouched.
    assert_eq!(text_concat[1].code, 0x95);
}
/// A relocation against a LOCAL (non-extern) symbol must not be treated as
/// a kfunc call: the call site keeps BPF_PSEUDO_CALL and imm -1.
#[test]
fn patch_kfunc_calls_skips_non_extern_symbol() {
    let kf_name = "static_helper";
    let (btf_blob, _func_id, _) = build_kfunc_btf_blob(kf_name);
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    // Only difference from the happy path: STB_LOCAL instead of STB_GLOBAL.
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_LOCAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_kfunc_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text 5=.BTF.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_kfunc_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    // Call site unchanged.
    assert_eq!(text_concat[0].src_reg(), BPF_PSEUDO_CALL);
    assert_eq!(text_concat[0].imm, -1);
}
/// The extern symbol's name ("unknown_kfunc") has no matching BTF FUNC —
/// the BTF only declares "bpf_task_acquire" — so patching must be skipped.
#[test]
fn patch_kfunc_calls_skips_symbol_not_in_btf() {
    let (btf_blob, _func_id, _) = build_kfunc_btf_blob("bpf_task_acquire");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // The ELF symbol carries a name that is absent from the BTF.
    let unknown = "unknown_kfunc";
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(unknown.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_kfunc_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text 5=.BTF.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_kfunc_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    // Call site unchanged.
    assert_eq!(text_concat[0].src_reg(), BPF_PSEUDO_CALL);
    assert_eq!(text_concat[0].imm, -1);
}
/// The relocation section targets `.maps` (via `.info(2)`), whose section
/// index is not in `section_bases` — such relocations must be ignored and
/// the call site left as-is.
#[test]
fn patch_kfunc_calls_ignores_non_text_relocations() {
    let kf_name = "bpf_task_acquire";
    let (btf_blob, _func_id, _) = build_kfunc_btf_blob(kf_name);
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_kfunc_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.maps 3=.strtab 4=.symtab 5=.rel.maps 6=.BTF.
    // The rel section's .info(2) points at .maps, not .text.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".maps", sh::SHT_PROGBITS).data(vec![0u8; 8]),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(3)
                .entsize(24),
            SecSpec::new(".rel.maps", sh::SHT_REL)
                .data(rel_data)
                .link(4)
                .info(2)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_kfunc_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    // Only .text (index 1) has a base; .maps does not.
    section_bases.insert(1, 0);
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    assert_eq!(text_concat[0].src_reg(), BPF_PSEUDO_CALL);
    assert_eq!(text_concat[0].imm, -1);
}
/// A relocation whose `r_offset` (100) lies beyond the 16-byte `.text`
/// section must be rejected instead of indexing out of bounds.
#[test]
fn patch_kfunc_calls_rejects_out_of_bounds_offset() {
    let kf_name = "bpf_task_acquire";
    let (btf_blob, _func_id, _) = build_kfunc_btf_blob(kf_name);
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_kfunc_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    // Only difference from the happy path: r_offset 100 > text len (16).
    let rel_data: Vec<u8> = elf64_rel(100, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text 5=.BTF.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_kfunc_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    assert_eq!(text_concat[0].src_reg(), BPF_PSEUDO_CALL);
    assert_eq!(text_concat[0].imm, -1);
}
/// The relocation lands on a two-slot ld_imm64 (opcode 0x18), not a call
/// instruction — the patcher must leave the whole program untouched.
#[test]
fn patch_kfunc_calls_rejects_non_call_instruction() {
    let kf_name = "bpf_task_acquire";
    let (btf_blob, _func_id, _) = build_kfunc_btf_blob(kf_name);
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    // Two-slot ld_imm64 at insn 0/1 instead of a call instruction.
    let ld_imm64_first_slot: [u8; 8] = [0x18, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
    let ld_imm64_second_slot: [u8; 8] = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&ld_imm64_first_slot);
    text.extend_from_slice(&ld_imm64_second_slot);
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 1).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text 5=.BTF.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(ld_imm64_first_slot),
        BpfInsn::from_le_bytes(ld_imm64_second_slot),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    // Snapshot and compare the whole program: nothing may change.
    let pre = text_concat.clone();
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    assert_eq!(text_concat, pre);
}
/// A pseudo call whose imm is already resolved (42, i.e. a subprog call,
/// not the -1 kfunc placeholder) must not be rewritten.
#[test]
fn patch_kfunc_calls_rejects_non_minus_one_imm() {
    let kf_name = "bpf_task_acquire";
    let (btf_blob, _func_id, _) = build_kfunc_btf_blob(kf_name);
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    // `call 42` with BPF_PSEUDO_CALL: imm != -1, so not a kfunc placeholder.
    let subprog_call: [u8; 8] = [0x85, 0x10, 0x00, 0x00, 0x2a, 0x00, 0x00, 0x00];
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&subprog_call);
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text 5=.BTF.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(subprog_call),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    // Still a plain pseudo call with imm 42.
    assert_eq!(text_concat[0].src_reg(), BPF_PSEUDO_CALL);
    assert_eq!(text_concat[0].imm, 42);
}
/// `find_extern_func_btf_id` must only match BTF FUNC entries: a name that
/// exists but belongs to kind 14 (a VAR, not a FUNC) yields `None`, as
/// does a name absent from the BTF entirely.
#[test]
fn find_extern_func_btf_id_filters_to_func_kind() {
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = strings.len() as u32;
    strings.extend_from_slice(b"u64");
    strings.push(0);
    let n_foo = strings.len() as u32;
    strings.extend_from_slice(b"foo");
    strings.push(0);
    let mut types: Vec<u8> = Vec::new();
    // id 1: u64 INT (kind 1) with its extra encoding word.
    types.extend_from_slice(&kfunc_btf_type_header(n_u64, 1, 0, 8));
    types.extend_from_slice(&64u32.to_le_bytes());
    // id 2: "foo" declared with kind 14 (not FUNC); the trailing u32 is
    // the kind's extra data word.
    types.extend_from_slice(&kfunc_btf_type_header(n_foo, 14, 0, 1));
    types.extend_from_slice(&1u32.to_le_bytes());
    // Minimal BTF header: magic, version, flags, hdr_len, type_off,
    // type_len, str_off (= type_len), str_len.
    let mut blob: Vec<u8> = Vec::new();
    blob.extend_from_slice(&0xEB9F_u16.to_le_bytes());
    blob.push(1);
    blob.push(0);
    blob.extend_from_slice(&24u32.to_le_bytes());
    blob.extend_from_slice(&0u32.to_le_bytes());
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes());
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes());
    blob.extend_from_slice(&(strings.len() as u32).to_le_bytes());
    blob.extend_from_slice(&types);
    blob.extend_from_slice(&strings);
    let btf = Btf::from_bytes(&blob).expect("parse btf");
    assert_eq!(find_extern_func_btf_id(&btf, "foo"), None);
    assert_eq!(find_extern_func_btf_id(&btf, "absent"), None);
}
/// A `call -1` with src_reg = 1 (BPF_PSEUDO_CALL): the pre-relocation
/// shape of a subprog call whose target imm is still the -1 placeholder.
fn pre_reloc_subprog_call_bytes() -> [u8; 8] {
    let mut insn = [0u8; 8];
    insn[0] = 0x85; // BPF_JMP | BPF_CALL
    insn[1] = 0x10; // src_reg = 1
    insn[4..8].fill(0xff); // imm = -1 (little-endian)
    insn
}
/// A harmless filler instruction (opcode 0xbf with all other fields zero)
/// used as a subprog body placeholder.
fn subprog_nop_bytes() -> [u8; 8] {
    let mut insn = [0u8; 8];
    insn[0] = 0xbf;
    insn
}
/// Happy path for `patch_subprog_calls`: a `call -1` relocated against an
/// STT_FUNC symbol at byte 16 of `.text` (insn index 2) gets its imm
/// rewritten to the pc-relative distance callee_pc - call_pc - 1 = 1,
/// while opcode and src_reg stay untouched.
#[test]
fn patch_subprog_calls_happy_path_rewrites_imm() {
    let callee_name = "my_subprog";
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(callee_name.as_bytes());
    strtab.push(0);
    // Callee symbol: STT_FUNC in section 1 (.text) at byte 16 = insn 2.
    let callee_st_value: u64 = 16;
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_FUNC),
        1,
        callee_st_value,
        0,
    ));
    // .text: call (insn 0), exit (insn 1), callee body filler (insn 2).
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_subprog_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    text.extend_from_slice(&subprog_nop_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_subprog_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
        BpfInsn::from_le_bytes(subprog_nop_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    assert_eq!(text_concat[0].imm, -1);
    patch_subprog_calls(&mut text_concat, &elf, &section_bases);
    assert_eq!(
        text_concat[0].imm, 1,
        "imm patched to callee_pc - call_pc - 1"
    );
    assert_eq!(
        text_concat[0].src_reg(),
        BPF_PSEUDO_CALL,
        "src_reg untouched (subprog calls keep BPF_PSEUDO_CALL)"
    );
    assert_eq!(text_concat[0].code, 0x85, "opcode untouched");
}
/// A pseudo call whose imm is already resolved (5, not the -1 placeholder)
/// must be left untouched by `patch_subprog_calls`.
#[test]
fn patch_subprog_calls_skips_non_minus_one_imm() {
    let callee_name = "static_subprog";
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(callee_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_LOCAL, syms::STT_FUNC),
        1,
        0,
        0,
    ));
    // Overwrite the placeholder imm (-1) with an already-resolved value 5.
    let mut call = pre_reloc_subprog_call_bytes();
    call[4..8].copy_from_slice(&5i32.to_le_bytes());
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&call);
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(call),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    assert_eq!(text_concat[0].imm, 5);
    patch_subprog_calls(&mut text_concat, &elf, &section_bases);
    assert_eq!(text_concat[0].imm, 5, "non-(-1) imm must stay untouched");
}
/// An STT_NOTYPE symbol with shndx 0 is the extern-kfunc shape, not a
/// subprog — `patch_subprog_calls` must leave its call site alone (it is
/// the kfunc patcher's job).
#[test]
fn patch_subprog_calls_skips_stt_notype_symbol() {
    let kf_name = "bpf_some_kfunc";
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    // GLOBAL / NOTYPE / SHN_UNDEF: the extern kfunc symbol shape.
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_subprog_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_subprog_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    patch_subprog_calls(&mut text_concat, &elf, &section_bases);
    assert_eq!(
        text_concat[0].imm, -1,
        "STT_NOTYPE / SHN_UNDEF kfunc shape must not be touched"
    );
}
/// The callee symbol lives in section 5 (`.other`), which has no entry in
/// `section_bases` — the patcher cannot compute a target pc and must skip
/// the call site instead of guessing.
#[test]
fn patch_subprog_calls_skips_callee_section_outside_section_bases() {
    let callee_name = "subprog_in_other_section";
    let mut strtab: Vec<u8> = vec![0];
    let name_off = strtab.len() as u32;
    strtab.extend_from_slice(callee_name.as_bytes());
    strtab.push(0);
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    // Callee STT_FUNC in section index 5 (.other, appended last below).
    symtab.extend_from_slice(&elf64_sym(
        name_off,
        st_info(syms::STB_GLOBAL, syms::STT_FUNC),
        5,
        0,
        0,
    ));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&pre_reloc_subprog_call_bytes());
    text.extend_from_slice(&kfunc_exit_bytes());
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text 5=.other.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".other", sh::SHT_PROGBITS).data(vec![0u8; 8]),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(pre_reloc_subprog_call_bytes()),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    // Only .text (index 1) has a base; .other (index 5) does not.
    section_bases.insert(1, 0);
    patch_subprog_calls(&mut text_concat, &elf, &section_bases);
    assert_eq!(
        text_concat[0].imm, -1,
        "callee section outside section_bases must skip patching"
    );
}
/// A `call imm` instruction with src_reg = 1 (BPF_PSEUDO_CALL) and the
/// given immediate, serialized little-endian.
fn pseudo_call_bytes(imm: i32) -> [u8; 8] {
    let mut insn = [0u8; 8];
    insn[0] = 0x85; // BPF_JMP | BPF_CALL
    insn[1] = 0x10; // src_reg = 1
    insn[4..8].copy_from_slice(&imm.to_le_bytes());
    insn
}
/// A `call imm` instruction with src_reg = 2 (BPF_PSEUDO_KFUNC_CALL) and
/// the given immediate, serialized little-endian.
fn pseudo_kfunc_call_bytes(imm: i32) -> [u8; 8] {
    let mut insn = [0u8; 8];
    insn[0] = 0x85; // BPF_JMP | BPF_CALL
    insn[1] = 0x20; // src_reg = 2
    insn[4..8].copy_from_slice(&imm.to_le_bytes());
    insn
}
#[allow(clippy::too_many_arguments)]
/// Build the common fixture for the `build_subprog_returns_*` tests: a
/// minimal BPF ELF whose `.text` holds `call_bytes` + exit, with one
/// relocation at insn 0 against a single symbol shaped by the caller
/// (`sym_st_type_bind`, `sym_st_shndx`).
///
/// Returns `(elf_bytes, decoded_text_insns, section_bases)` where
/// `section_bases` maps .text (section index 1) to base 0.
fn build_subprog_test_scaffold(
    sym_name: &str,
    sym_st_type_bind: u8,
    sym_st_shndx: u16,
    call_bytes: [u8; 8],
) -> (Vec<u8>, Vec<BpfInsn>, HashMap<u32, usize>) {
    let mut strtab: Vec<u8> = vec![0];
    let n_sym = strtab.len() as u32;
    strtab.extend_from_slice(sym_name.as_bytes());
    strtab.push(0);
    // Null symbol + the one caller-shaped symbol (st_value/size = 0).
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(n_sym, sym_st_type_bind, sym_st_shndx, 0, 0));
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&call_bytes);
    text.extend_from_slice(&kfunc_exit_bytes());
    // Relocation at byte 0 of .text against symbol #1 (r_type 1).
    let rel_data: Vec<u8> = elf64_rel(0, 1, 1).to_vec();
    // Section indices: 1=.text 2=.strtab 3=.symtab 4=.rel.text.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(call_bytes),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    (blob, text_concat, section_bases)
}
/// A BPF_PSEUDO_CALL relocated against an STT_FUNC symbol with an
/// allowlisted name must yield exactly one `SubprogReturn` pointing at the
/// call's instruction offset (0).
#[test]
fn build_subprog_returns_happy_path_emits_one() {
    let (blob, text_concat, section_bases) = build_subprog_test_scaffold(
        "scx_alloc_internal",
        st_info(syms::STB_GLOBAL, syms::STT_FUNC),
        1,
        pseudo_call_bytes(123),
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let out = build_subprog_returns(&text_concat, &elf, &section_bases);
    assert_eq!(out.len(), 1, "happy path: expected 1 entry, got {out:?}");
    assert_eq!(
        out[0].insn_offset, 0,
        "SubprogReturn must point at the call PC"
    );
}
/// A call with src_reg = BPF_PSEUDO_KFUNC_CALL is a kfunc call, not a
/// subprog call — it must not produce a `SubprogReturn` entry.
#[test]
fn build_subprog_returns_skips_pseudo_kfunc_call() {
    let (blob, text_concat, section_bases) = build_subprog_test_scaffold(
        "scx_alloc_internal",
        st_info(syms::STB_GLOBAL, syms::STT_FUNC),
        1,
        pseudo_kfunc_call_bytes(0),
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let out = build_subprog_returns(&text_concat, &elf, &section_bases);
    assert!(
        out.is_empty(),
        "BPF_PSEUDO_KFUNC_CALL must not seed a SubprogReturn: {out:?}"
    );
}
/// A relocation against an STT_OBJECT (data) symbol is not a subprog call
/// target and must not produce a `SubprogReturn` entry.
#[test]
fn build_subprog_returns_skips_stt_object() {
    let (blob, text_concat, section_bases) = build_subprog_test_scaffold(
        "scx_alloc_internal",
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        1,
        pseudo_call_bytes(0),
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let out = build_subprog_returns(&text_concat, &elf, &section_bases);
    assert!(
        out.is_empty(),
        "STT_OBJECT symbol must not seed a SubprogReturn: {out:?}"
    );
}
/// A valid subprog call whose callee name is not on the tracked allowlist
/// must produce no `SubprogReturn` entry.
#[test]
fn build_subprog_returns_skips_non_allowlist_name() {
    let (blob, text_concat, section_bases) = build_subprog_test_scaffold(
        "ktstr_some_unrelated_helper",
        st_info(syms::STB_GLOBAL, syms::STT_FUNC),
        1,
        pseudo_call_bytes(0),
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let out = build_subprog_returns(&text_concat, &elf, &section_bases);
    assert!(
        out.is_empty(),
        "non-allowlist subprog name must not seed a SubprogReturn: {out:?}"
    );
}
/// The first slot of a two-slot ld_imm64 instruction (opcode 0x18) with
/// the low 32 bits of the immediate in `imm`; regs and offset are zero.
fn ld_imm64_first_slot_bytes(imm: i32) -> [u8; 8] {
    let mut insn = [0u8; 8];
    insn[0] = 0x18; // BPF_LD | BPF_IMM | BPF_DW
    insn[4..8].copy_from_slice(&imm.to_le_bytes());
    insn
}
/// The all-zero second slot of a two-slot ld_imm64 instruction (carries
/// the high 32 bits of the immediate, which are zero in these fixtures).
fn ld_imm64_second_slot_bytes() -> [u8; 8] {
    Default::default()
}
/// Append a BTF DATASEC type to `types` in little-endian wire format:
/// a 12-byte type header (kind 15, vlen = entry count, size_or_type =
/// `section_size`) followed by one `btf_var_secinfo` triple
/// (type_id, offset, size) per entry.
fn append_btf_datasec(
    types: &mut Vec<u8>,
    name_off: u32,
    section_size: u32,
    vsi_entries: &[(u32, u32, u32)],
) {
    const BTF_KIND_DATASEC: u32 = 15;
    let info = ((BTF_KIND_DATASEC & 0x1f) << 24) | (vsi_entries.len() as u32 & 0xffff);
    for word in [name_off, info, section_size] {
        types.extend_from_slice(&word.to_le_bytes());
    }
    for &(type_id, offset, size) in vsi_entries {
        for word in [type_id, offset, size] {
            types.extend_from_slice(&word.to_le_bytes());
        }
    }
}
/// Build a minimal BTF blob with two types: id 1 = u64 INT and id 2 =
/// an empty DATASEC named `sec_name` with declared size 32.
///
/// Returns `(blob, datasec_type_id)` = (bytes, 2).
fn build_datasec_btf_blob(sec_name: &str) -> (Vec<u8>, u32) {
    // String table: leading NUL, then "u64" and the section name.
    let mut strings: Vec<u8> = vec![0];
    let n_u64 = strings.len() as u32;
    strings.extend_from_slice(b"u64");
    strings.push(0);
    let n_sec = strings.len() as u32;
    strings.extend_from_slice(sec_name.as_bytes());
    strings.push(0);
    let mut types: Vec<u8> = Vec::new();
    // id 1: u64 INT (kind 1) with its extra encoding word.
    types.extend_from_slice(&kfunc_btf_type_header(n_u64, 1, 0, 8));
    let int_data: u32 = 64;
    types.extend_from_slice(&int_data.to_le_bytes());
    // id 2: DATASEC of size 32 with no var entries.
    append_btf_datasec(&mut types, n_sec, 32, &[]);
    // BTF header: magic, version, flags, hdr_len=24, type_off=0,
    // type_len, str_off (= type_len since strings follow types), str_len.
    let mut blob: Vec<u8> = Vec::new();
    blob.extend_from_slice(&0xEB9F_u16.to_le_bytes());
    blob.push(1);
    blob.push(0);
    blob.extend_from_slice(&24u32.to_le_bytes());
    blob.extend_from_slice(&0u32.to_le_bytes());
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes());
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes());
    blob.extend_from_slice(&(strings.len() as u32).to_le_bytes());
    blob.extend_from_slice(&types);
    blob.extend_from_slice(&strings);
    (blob, 2)
}
#[allow(clippy::too_many_arguments)]
// Build a complete fixture for exercising build_datasec_pointers: an ELF blob,
// the raw BTF blob, the decoded instruction stream, and the section-base map.
// Each caller-controlled knob (r_type/r_offset, symbol value/shndx/info, BTF
// section name, LD_IMM64 imm) lets a test violate exactly one gate.
fn build_datasec_test_scaffold(
bss_name: &'static str,
sec_name_in_btf: &str,
r_type: u32,
r_offset: u64,
sym_st_value: u64,
sym_st_shndx: u16,
sym_st_type_bind: u8,
imm_value: i32,
) -> (Vec<u8>, Vec<u8>, Vec<BpfInsn>, HashMap<u32, usize>) {
// BTF declares `sec_name_in_btf` as a DATASEC with type id 2.
let (btf_blob, _ds_id) = build_datasec_btf_blob(sec_name_in_btf);
// String table: NUL byte, then "global_var" at offset 1.
let mut strtab: Vec<u8> = vec![0];
let n_sym = strtab.len() as u32;
strtab.extend_from_slice(b"global_var");
strtab.push(0);
// Symtab: mandatory null symbol (index 0), then the test symbol (index 1)
// with caller-chosen info byte, section index and value.
let mut symtab: Vec<u8> = Vec::new();
symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
symtab.extend_from_slice(&elf64_sym(
n_sym,
sym_st_type_bind,
sym_st_shndx,
sym_st_value,
0,
));
// .text: LD_IMM64 pair (slots at byte 0 and 8) followed by exit at byte 16.
let mut text: Vec<u8> = Vec::new();
text.extend_from_slice(&ld_imm64_first_slot_bytes(imm_value));
text.extend_from_slice(&ld_imm64_second_slot_bytes());
text.extend_from_slice(&kfunc_exit_bytes());
// Single relocation against symbol index 1, at caller-chosen offset/type.
let rel_data: Vec<u8> = elf64_rel(r_offset, 1, r_type).to_vec();
// build_elf64 prepends a NULL section, so user sections get indices:
// 1 = bss_name, 2 = .text, 3 = .strtab, 4 = .symtab, 5 = .rel.text, 6 = .BTF.
let blob = build_elf64(
vec![
SecSpec::new(bss_name, sh::SHT_PROGBITS).data(vec![0u8; 32]),
SecSpec::new(".text", sh::SHT_PROGBITS)
.flags(sh::SHF_EXECINSTR.into())
.data(text),
SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
// link(3): symtab's string table is .strtab (section index 3).
SecSpec::new(".symtab", sh::SHT_SYMTAB)
.data(symtab)
.link(3)
.entsize(24),
// link(4): symbol table; info(2): relocations apply to .text.
SecSpec::new(".rel.text", sh::SHT_REL)
.data(rel_data)
.link(4)
.info(2) .entsize(16),
SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob.clone()),
],
h::EM_BPF,
h::ET_REL,
);
// Decoded mirror of the .text bytes above, in instruction units.
let text_concat: Vec<BpfInsn> = vec![
BpfInsn::from_le_bytes(ld_imm64_first_slot_bytes(imm_value)),
BpfInsn::from_le_bytes(ld_imm64_second_slot_bytes()),
BpfInsn::from_le_bytes(kfunc_exit_bytes()),
];
// Map .text (section index 2) to instruction base 0 in text_concat.
let mut section_bases: HashMap<u32, usize> = HashMap::new();
section_bases.insert(2, 0);
(blob, btf_blob, text_concat, section_bases)
}
#[test]
fn build_datasec_pointers_rejects_non_r_bpf_64_64() {
    // r_type = 10 is not R_BPF_64_64; the relocation must be skipped entirely.
    let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
        ".bss",
        ".bss",
        10,
        0,
        0,
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        0,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
    assert!(out.is_empty(), "non-R_BPF_64_64 reloc must be skipped");
}
#[test]
fn build_datasec_pointers_rejects_non_multiple_of_8_offset() {
    // r_offset = 4 lands mid-instruction (BPF insns are 8 bytes): reject.
    let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
        ".bss",
        ".bss",
        1,
        4,
        0,
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        0,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
    assert!(
        out.is_empty(),
        "r_offset=4 (not multiple of 8) must be rejected"
    );
}
#[test]
fn build_datasec_pointers_rejects_offset_past_section_size() {
    // r_offset = 100 is beyond the 24-byte .text section: reject.
    let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
        ".bss",
        ".bss",
        1,
        100,
        0,
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        0,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
    assert!(
        out.is_empty(),
        "r_offset past section size must be rejected"
    );
}
#[test]
fn build_datasec_pointers_rejects_non_ld_imm64_opcode() {
    // r_offset = 16 points at the exit instruction, not an LD_IMM64: reject.
    let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
        ".bss",
        ".bss",
        1,
        16,
        0,
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        0,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
    assert!(
        out.is_empty(),
        "reloc on non-LD_IMM64 opcode must be rejected"
    );
}
#[test]
fn build_datasec_pointers_rejects_special_section_index_symbols() {
    // SHN_UNDEF (0) and reserved indices (0xFFF1 = SHN_ABS, 0xFFF2 = SHN_COMMON
    // range) do not name a real section: all must be rejected.
    for shndx in [0u16, 0xFFF1, 0xFFF2] {
        let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
            ".bss",
            ".bss",
            1,
            0,
            0,
            shndx,
            st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
            0,
        );
        let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
        let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
        // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
        let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
        assert!(
            out.is_empty(),
            "symbol with st_shndx={shndx:#x} must be rejected"
        );
    }
}
#[test]
fn build_datasec_pointers_rejects_section_not_in_btf() {
    // ELF section is named ".bss" but the BTF only declares ".rodata" as a
    // DATASEC, so the lookup by section name must fail.
    let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
        ".bss",
        ".rodata",
        1,
        0,
        0,
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        0,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
    assert!(
        out.is_empty(),
        "section name not in BTF as DATASEC must be rejected"
    );
}
#[test]
fn build_datasec_pointers_rejects_st_value_past_u32_max() {
    // st_value that overflows u32 cannot be folded into a 32-bit offset.
    let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
        ".bss",
        ".bss",
        1,
        0,
        (u32::MAX as u64) + 1,
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        0,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
    assert!(out.is_empty(), "sym.st_value > u32::MAX must be rejected");
}
#[test]
fn build_datasec_pointers_happy_path_emits_pointer() {
    // All gates pass: R_BPF_64_64 (1) at offset 0, LD_IMM64 opcode, real
    // section index (1), section name present in BTF, st_value in u32 range.
    let (blob, btf_blob, text_concat, section_bases) = build_datasec_test_scaffold(
        ".bss",
        ".bss",
        1,
        0,
        0,
        1,
        st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
        16,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    let out = build_datasec_pointers(&text_concat, &btf, &elf, &section_bases);
    assert_eq!(out.len(), 1, "all gates pass → exactly one entry");
    assert_eq!(out[0].insn_offset, 0, "PC = base + r_offset/8 = 0");
    assert_eq!(
        out[0].datasec_type_id, 2,
        "datasec id is 2 (per build_datasec_btf_blob)"
    );
    assert_eq!(
        out[0].base_offset, 16,
        "base_offset = imm (16) + st_value (0) = 16"
    );
}
#[test]
fn find_datasec_btf_id_filters_to_datasec_kind() {
    // All three types reuse the same ".bss" name so only the KIND filter can
    // distinguish them: id 1 = Int, id 2 = Var (kind 14), id 3 = Datasec.
    let mut strings = vec![0u8];
    let n_bss = strings.len() as u32;
    strings.extend_from_slice(b".bss");
    strings.push(0);
    let mut types: Vec<u8> = Vec::new();
    types.extend_from_slice(&kfunc_btf_type_header(n_bss, 1, 0, 4));
    types.extend_from_slice(&32u32.to_le_bytes()); // int encoding word
    types.extend_from_slice(&kfunc_btf_type_header(n_bss, 14, 0, 1));
    types.extend_from_slice(&1u32.to_le_bytes()); // var linkage word
    append_btf_datasec(&mut types, n_bss, 8, &[]);
    // 24-byte BTF header in front of the type and string sections.
    let mut blob: Vec<u8> = Vec::new();
    blob.extend_from_slice(&0xEB9F_u16.to_le_bytes());
    blob.push(1); // version
    blob.push(0); // flags
    blob.extend_from_slice(&24u32.to_le_bytes()); // hdr_len
    blob.extend_from_slice(&0u32.to_le_bytes()); // type_off
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes()); // type_len
    blob.extend_from_slice(&(types.len() as u32).to_le_bytes()); // str_off
    blob.extend_from_slice(&(strings.len() as u32).to_le_bytes()); // str_len
    blob.extend_from_slice(&types);
    blob.extend_from_slice(&strings);
    let btf = Btf::from_bytes(&blob).expect("parse btf");
    assert_eq!(
        find_datasec_btf_id(&btf, ".bss"),
        Some(3),
        "kind filter must skip past Int/Var to the Datasec",
    );
    assert_eq!(find_datasec_btf_id(&btf, ".rodata"), None);
}
#[test]
fn patch_kfunc_calls_skips_already_relocated_src_reg() {
    // A call insn whose src_reg already equals BPF_PSEUDO_KFUNC_CALL has been
    // relocated (imm holds a kernel BTF id, here 42); the patcher must leave
    // both fields untouched even though a kfunc relocation points at it.
    let kf_name = "bpf_task_acquire";
    let (btf_blob, _expected_func_id, _t_id) = build_kfunc_btf_blob(kf_name);
    let btf = Btf::from_bytes(&btf_blob).expect("parse btf");
    // String table: NUL byte, then the kfunc name at offset 1.
    let mut strtab: Vec<u8> = vec![0];
    let kf_str_off = strtab.len() as u32;
    strtab.extend_from_slice(kf_name.as_bytes());
    strtab.push(0);
    // Symtab: null symbol, then an undefined STT_NOTYPE symbol for the kfunc.
    let mut symtab: Vec<u8> = Vec::new();
    symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
    symtab.extend_from_slice(&elf64_sym(
        kf_str_off,
        st_info(syms::STB_GLOBAL, syms::STT_NOTYPE),
        0,
        0,
        0,
    ));
    // code 0x85 (call), reg byte 0x20, imm = 42 (little-endian).
    let already_relocated_call: [u8; 8] = [0x85, 0x20, 0x00, 0x00, 42, 0x00, 0x00, 0x00];
    let mut text: Vec<u8> = Vec::new();
    text.extend_from_slice(&already_relocated_call);
    text.extend_from_slice(&kfunc_exit_bytes());
    // Relocation against symbol 1 at instruction 0.
    let rel_data: Vec<u8> = elf64_rel(0, 1, 10).to_vec();
    // Section indices after the implicit NULL section:
    // 1 = .text, 2 = .strtab, 3 = .symtab, 4 = .rel.text, 5 = .BTF.
    let blob = build_elf64(
        vec![
            SecSpec::new(".text", sh::SHT_PROGBITS)
                .flags(sh::SHF_EXECINSTR.into())
                .data(text),
            SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
            SecSpec::new(".symtab", sh::SHT_SYMTAB)
                .data(symtab)
                .link(2)
                .entsize(24),
            SecSpec::new(".rel.text", sh::SHT_REL)
                .data(rel_data)
                .link(3)
                .info(1)
                .entsize(16),
            SecSpec::new(".BTF", sh::SHT_PROGBITS).data(btf_blob),
        ],
        h::EM_BPF,
        h::ET_REL,
    );
    let elf = goblin::elf::Elf::parse(&blob).expect("parse elf");
    let mut text_concat: Vec<BpfInsn> = vec![
        BpfInsn::from_le_bytes(already_relocated_call),
        BpfInsn::from_le_bytes(kfunc_exit_bytes()),
    ];
    let mut section_bases: HashMap<u32, usize> = HashMap::new();
    section_bases.insert(1, 0);
    // Preconditions: the insn really is an already-relocated kfunc call.
    assert_eq!(text_concat[0].code, 0x85);
    assert_eq!(text_concat[0].src_reg(), BPF_PSEUDO_KFUNC_CALL);
    assert_eq!(text_concat[0].imm, 42);
    // Fix: `§ion_bases` was mojibake (HTML `&sect;`) for `&section_bases`.
    patch_kfunc_calls(&mut text_concat, &btf, &elf, &section_bases);
    assert_eq!(
        text_concat[0].src_reg(),
        BPF_PSEUDO_KFUNC_CALL,
        "src_reg must survive unmodified",
    );
    assert_eq!(
        text_concat[0].imm, 42,
        "imm must survive unmodified — kernel BTF id preserved",
    );
}
#[test]
fn build_fwd_index_indexes_single_btf_structs() {
    // One Int plus two named structs; only the structs may land in the index.
    let mut strings = vec![0u8];
    let n_int = push_btf_name(&mut strings, "u64");
    let n_foo = push_btf_name(&mut strings, "foo");
    let n_bar = push_btf_name(&mut strings, "bar");
    let n_x = push_btf_name(&mut strings, "x");
    // Both structs carry the same single member `x: u64` at offset 0.
    let types = vec![
        SynKind::Int { name_off: n_int, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Struct {
            name_off: n_foo,
            size: 8,
            members: vec![SynMember { name_off: n_x, type_id: 1, byte_offset: 0 }],
        },
        SynKind::Struct {
            name_off: n_bar,
            size: 8,
            members: vec![SynMember { name_off: n_x, type_id: 1, byte_offset: 0 }],
        },
    ];
    let blob = build_btf_full(&types, &strings);
    let btfs = vec![Arc::new(Btf::from_bytes(&blob).expect("parse btf"))];
    let index = build_fwd_index(&btfs);
    // Struct ids follow the Int: foo = 2, bar = 3, both in BTF #0.
    assert_eq!(index.get("foo"), Some(&FwdIndexEntry { btfs_idx: 0, type_id: 2 }));
    assert_eq!(index.get("bar"), Some(&FwdIndexEntry { btfs_idx: 0, type_id: 3 }));
    assert!(!index.contains_key("u64"), "Int names must not be indexed");
}
#[test]
fn build_fwd_index_first_write_wins_on_duplicate_name() {
    // BTF #0: struct foo { x: u64 } (size 8).
    let mut strings_0 = vec![0u8];
    let n_int_0 = push_btf_name(&mut strings_0, "u64");
    let n_foo_0 = push_btf_name(&mut strings_0, "foo");
    let n_x_0 = push_btf_name(&mut strings_0, "x");
    let types_0 = vec![
        SynKind::Int { name_off: n_int_0, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Struct {
            name_off: n_foo_0,
            size: 8,
            members: vec![SynMember { name_off: n_x_0, type_id: 1, byte_offset: 0 }],
        },
    ];
    let blob_0 = build_btf_full(&types_0, &strings_0);
    let btf_0 = Arc::new(Btf::from_bytes(&blob_0).expect("parse btf 0"));
    // BTF #1: a different-shaped struct under the same name "foo"
    // (size 16, member y: u64 at byte 8).
    let mut strings_1 = vec![0u8];
    let n_int_1 = push_btf_name(&mut strings_1, "u64");
    let n_foo_1 = push_btf_name(&mut strings_1, "foo");
    let n_y_1 = push_btf_name(&mut strings_1, "y");
    let types_1 = vec![
        SynKind::Int { name_off: n_int_1, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Struct {
            name_off: n_foo_1,
            size: 16,
            members: vec![SynMember { name_off: n_y_1, type_id: 1, byte_offset: 8 }],
        },
    ];
    let blob_1 = build_btf_full(&types_1, &strings_1);
    let btf_1 = Arc::new(Btf::from_bytes(&blob_1).expect("parse btf 1"));
    let btfs = vec![btf_0, btf_1];
    let index = build_fwd_index(&btfs);
    assert_eq!(
        index.get("foo"),
        Some(&FwdIndexEntry { btfs_idx: 0, type_id: 2 }),
        "first-write-wins: BTF #0 wins on duplicate name"
    );
}
#[test]
fn build_fwd_index_skips_anonymous_structs() {
    // name_off == 0 marks the struct as anonymous; it must never be indexed.
    let mut strings = vec![0u8];
    let n_int = push_btf_name(&mut strings, "u64");
    let n_x = push_btf_name(&mut strings, "x");
    let types = vec![
        SynKind::Int { name_off: n_int, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Struct {
            name_off: 0,
            size: 8,
            members: vec![SynMember { name_off: n_x, type_id: 1, byte_offset: 0 }],
        },
    ];
    let blob = build_btf_full(&types, &strings);
    let btfs = vec![Arc::new(Btf::from_bytes(&blob).expect("parse btf"))];
    let index = build_fwd_index(&btfs);
    assert!(
        index.is_empty(),
        "anonymous structs must not be indexed: {index:?}"
    );
}
#[test]
fn build_fwd_index_skips_fwd_when_complete_body_in_later_btf() {
    // BTF #0 only forward-declares "shared"; BTF #1 carries the full body.
    let mut strings_0 = vec![0u8];
    let n_int_0 = push_btf_name(&mut strings_0, "u64");
    let n_shared_0 = push_btf_name(&mut strings_0, "shared");
    let types_0 = vec![
        SynKind::Int { name_off: n_int_0, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Fwd { name_off: n_shared_0, kind_flag: 0 },
    ];
    let blob_0 = build_btf_full(&types_0, &strings_0);
    let btf_0 = Arc::new(Btf::from_bytes(&blob_0).expect("parse btf 0"));
    let mut strings_1 = vec![0u8];
    let n_int_1 = push_btf_name(&mut strings_1, "u64");
    let n_shared_1 = push_btf_name(&mut strings_1, "shared");
    let n_v_1 = push_btf_name(&mut strings_1, "v");
    let types_1 = vec![
        SynKind::Int { name_off: n_int_1, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Struct {
            name_off: n_shared_1,
            size: 8,
            members: vec![SynMember { name_off: n_v_1, type_id: 1, byte_offset: 0 }],
        },
    ];
    let blob_1 = build_btf_full(&types_1, &strings_1);
    let btf_1 = Arc::new(Btf::from_bytes(&blob_1).expect("parse btf 1"));
    // Sanity: id 2 in BTF #0 really parsed as a Fwd, not a struct.
    let ty_0_id_2 = btf_0
        .resolve_type_by_id(2)
        .expect("BTF #0 id 2 must resolve");
    assert!(
        matches!(ty_0_id_2, Type::Fwd(_)),
        "BTF #0 id 2 must be Fwd, got {ty_0_id_2:?}"
    );
    let btfs = vec![btf_0, btf_1];
    let index = build_fwd_index(&btfs);
    assert_eq!(
        index.get("shared"),
        Some(&FwdIndexEntry { btfs_idx: 1, type_id: 2 }),
        "Fwd in BTF #0 must not register; complete body in BTF #1 wins: {index:?}"
    );
    assert_eq!(
        index.len(),
        1,
        "only the BTF #1 complete body should be indexed: {index:?}"
    );
}
#[test]
fn build_fwd_index_handles_empty_name_fwd_without_panic() {
    // Type id 2 is a Fwd with name_off 0 (empty name); id 3 is a named struct
    // that must still register normally after it.
    let mut strings = vec![0u8];
    let n_int = push_btf_name(&mut strings, "u64");
    let n_named = push_btf_name(&mut strings, "named");
    let n_x = push_btf_name(&mut strings, "x");
    let types = vec![
        SynKind::Int { name_off: n_int, size: 8, encoding: 0, offset: 0, bits: 64 },
        SynKind::Fwd { name_off: 0, kind_flag: 0 },
        SynKind::Struct {
            name_off: n_named,
            size: 8,
            members: vec![SynMember { name_off: n_x, type_id: 1, byte_offset: 0 }],
        },
    ];
    let blob = build_btf_full(&types, &strings);
    let btf = Arc::new(Btf::from_bytes(&blob).expect("parse btf"));
    // Sanity: id 2 really parsed as a Fwd.
    let ty_id_2 = btf.resolve_type_by_id(2).expect("BTF id 2 must resolve");
    assert!(
        matches!(ty_id_2, Type::Fwd(_)),
        "BTF id 2 must be Fwd, got {ty_id_2:?}"
    );
    let btfs = vec![btf];
    let index = build_fwd_index(&btfs);
    assert!(
        !index.contains_key(""),
        "empty-string key must not appear (anonymous Fwd): {index:?}"
    );
    assert_eq!(
        index.get("named"),
        Some(&FwdIndexEntry { btfs_idx: 0, type_id: 3 }),
        "named struct at id 3 must register after the empty-named Fwd at id 2: {index:?}"
    );
    assert_eq!(
        index.len(),
        1,
        "only the named struct should be indexed: {index:?}"
    );
}
#[test]
fn build_cast_analysis_indexes_cross_object_struct_body() {
// Two complete BPF object ELFs are embedded in an outer ELF's `.bpf.objs`
// section. build_cast_analysis_from_bytes must keep both objects' BTFs and
// build a cross-object fwd index: `cgx_target` has its body only in object B,
// `outer_a` only in object A.
// --- Object A: BTF defining struct `outer_a`; the name "cgx_target" is
// interned in A's string table but no A type uses it.
let mut strings_a = vec![0u8];
let n_int_a = push_btf_name(&mut strings_a, "u64");
let n_cgx_a = push_btf_name(&mut strings_a, "cgx_target");
let n_t_a = push_btf_name(&mut strings_a, "outer_a");
let n_field_a = push_btf_name(&mut strings_a, "ptr_to_target");
let n_func_a = push_btf_name(&mut strings_a, "func_a");
let n_text_a = push_btf_name(&mut strings_a, ".text");
// A's types: 1 = Int u64, 2 = struct outer_a, 3 = FuncProto, 4 = Func.
let types_a = vec![
SynKind::Int {
name_off: n_int_a,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynKind::Struct {
name_off: n_t_a,
size: 8,
members: vec![SynMember {
name_off: n_field_a,
type_id: 1,
byte_offset: 0,
}],
},
SynKind::FuncProto {
return_type_id: 0,
params: vec![SynParam {
name_off: 0,
type_id: 1,
}],
},
SynKind::Func {
name_off: n_func_a,
type_id: 3,
linkage: 1,
},
];
// n_cgx_a is deliberately unused in A's types (body lives in B).
let _ = n_cgx_a; let btf_blob_a = build_btf_full(&types_a, &strings_a);
let insns_a = vec![exit_insn()];
let text_a = insns_to_text_bytes(&insns_a);
let btf_ext_a = build_btf_ext(n_text_a, &[(0, 3)], 8);
let inner_a = build_full_bpf_object_elf(text_a, btf_blob_a, btf_ext_a);
// --- Object B: BTF defining struct `cgx_target` (the body A lacks).
let mut strings_b = vec![0u8];
let n_int_b = push_btf_name(&mut strings_b, "u64");
let n_cgx_b = push_btf_name(&mut strings_b, "cgx_target");
let n_marker_b = push_btf_name(&mut strings_b, "marker");
let n_func_b = push_btf_name(&mut strings_b, "func_b");
let n_text_b = push_btf_name(&mut strings_b, ".text");
// B's types: 1 = Int u64, 2 = struct cgx_target, 3 = FuncProto, 4 = Func.
let types_b = vec![
SynKind::Int {
name_off: n_int_b,
size: 8,
encoding: 0,
offset: 0,
bits: 64,
},
SynKind::Struct {
name_off: n_cgx_b,
size: 8,
members: vec![SynMember {
name_off: n_marker_b,
type_id: 1,
byte_offset: 0,
}],
},
SynKind::FuncProto {
return_type_id: 0,
params: vec![SynParam {
name_off: 0,
type_id: 1,
}],
},
SynKind::Func {
name_off: n_func_b,
type_id: 3,
linkage: 1,
},
];
let btf_blob_b = build_btf_full(&types_b, &strings_b);
let insns_b = vec![exit_insn()];
let text_b = insns_to_text_bytes(&insns_b);
let btf_ext_b = build_btf_ext(n_text_b, &[(0, 3)], 8);
let inner_b = build_full_bpf_object_elf(text_b, btf_blob_b, btf_ext_b);
// --- Outer ELF: name offsets 1 and 7 index "obj_a"/"obj_b" in this strtab.
let strtab = b"\0obj_a\0obj_b\0".to_vec();
let mut symtab = Vec::new();
symtab.extend_from_slice(&elf64_sym(0, 0, 0, 0, 0));
// Symbol "obj_a": section 1 (.bpf.objs), value 0, size = len(inner_a).
symtab.extend_from_slice(&elf64_sym(
1,
st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
1, 0,
inner_a.len() as u64,
));
// Symbol "obj_b": same section, starting where inner_a ends.
symtab.extend_from_slice(&elf64_sym(
7,
st_info(syms::STB_GLOBAL, syms::STT_OBJECT),
1,
inner_a.len() as u64,
inner_b.len() as u64,
));
// `.bpf.objs` payload: the two inner object ELFs back to back.
let mut bpf_objs_data = Vec::new();
bpf_objs_data.extend_from_slice(&inner_a);
bpf_objs_data.extend_from_slice(&inner_b);
// Section indices after the implicit NULL section:
// 1 = .bpf.objs, 2 = .strtab, 3 = .symtab (link(2) → .strtab).
let outer = build_elf64(
vec![
SecSpec::new(".bpf.objs", sh::SHT_PROGBITS).data(bpf_objs_data),
SecSpec::new(".strtab", sh::SHT_STRTAB).data(strtab),
SecSpec::new(".symtab", sh::SHT_SYMTAB)
.data(symtab)
.link(2)
.entsize(24),
],
h::EM_X86_64,
h::ET_REL,
);
let out = build_cast_analysis_from_bytes(&outer);
assert_eq!(
out.btfs.len(),
2,
"both embedded objects' BTFs must be retained: {}",
out.btfs.len()
);
let cgx_hit = out.fwd_index.get("cgx_target");
assert_eq!(
cgx_hit,
Some(&FwdIndexEntry {
btfs_idx: 1,
type_id: 2,
}),
"cross-BTF index must point cgx_target to BTF #1 at type id 2: {:?}",
out.fwd_index
);
assert_eq!(
out.fwd_index.get("outer_a"),
Some(&FwdIndexEntry {
btfs_idx: 0,
type_id: 2,
}),
"object A's struct outer_a must be indexed in BTF #0 at id 2"
);
}
#[test]
fn lazy_cast_map_get_full_returns_none_when_no_scheduler() {
    // Constructed with no scheduler source, the lazy map has nothing to build.
    let map = LazyCastMap::new(None);
    assert!(
        map.get_full().is_none(),
        "no-scheduler builder must short-circuit `.get_full()` to None",
    );
}
#[test]
fn cached_cast_analysis_concurrent_callers_share_one_oncelock_init() {
use std::sync::{Arc as StdArc, Barrier};
// Write a fixture ELF to a temp file so every thread resolves the same path.
let blob = build_recovers_arena_cast_outer_elf();
let dir = tempfile::tempdir().expect("tempdir");
let p = dir.path().join("concurrent.bin");
std::fs::write(&p, &blob).expect("write");
const N_THREADS: usize = 8;
// Barrier releases all threads at once to maximize the chance of a true
// concurrent race into the cache-initialization path.
let barrier = StdArc::new(Barrier::new(N_THREADS));
let path = p.clone();
// Scoped threads let each worker borrow/clone the local path without 'static.
let results: Vec<Arc<CastAnalysisOutput>> = std::thread::scope(|s| {
let handles: Vec<_> = (0..N_THREADS)
.map(|_| {
let barrier = barrier.clone();
let path = path.clone();
s.spawn(move || {
barrier.wait();
cached_cast_analysis_for_scheduler(&path)
.expect("non-empty fixture must produce Some")
})
})
.collect();
handles.into_iter().map(|h| h.join().unwrap()).collect()
});
assert_eq!(results.len(), N_THREADS);
// Pointer equality across all Arcs proves the analysis was computed once
// and shared, not rebuilt per caller.
let first = &results[0];
for (i, other) in results.iter().enumerate().skip(1) {
assert!(
Arc::ptr_eq(first, other),
"thread {i}: Arc must be pointer-equal to thread 0's; \
OnceLock dedup did NOT fire across concurrent callers",
);
}
}