use super::super::cast_analysis::AddrSpace;
use super::*;
/// Best-effort load of the test vmlinux BTF.
///
/// Returns `None` when no vmlinux is available on the build host; callers
/// treat that as a skip condition rather than a failure.
fn test_btf() -> Option<Btf> {
    let vmlinux = crate::monitor::find_test_vmlinux()?;
    crate::monitor::btf_offsets::load_btf_from_path(&vmlinux).ok()
}
#[test]
fn read_uint_le_padding() {
    // (input bytes, expected little-endian value) — short inputs are
    // zero-extended, a full 8-byte input uses every byte.
    let cases: &[(&[u8], u64)] = &[
        (&[0x12, 0x34], 0x3412),
        (&[0xff], 0xff),
        (&[0xff; 8], u64::MAX),
    ];
    for &(bytes, expected) in cases {
        assert_eq!(read_uint_le(bytes), expected);
    }
}
#[test]
fn sign_extend_basic() {
    // (raw value, bit width, expected result). Width 0 and width 64 are the
    // identity edge cases; a set top bit within `bits` extends to all ones.
    let cases = [
        (0xFF, 8, u64::MAX),
        (0xFFFF, 16, u64::MAX),
        (0x7F, 8, 0x7F),
        (123, 0, 123),
        (u64::MAX, 64, u64::MAX),
    ];
    for (value, bits, expected) in cases {
        assert_eq!(sign_extend(value, bits), expected);
    }
}
// Rendering a 4-byte `int` from an empty buffer must yield Truncated with
// needed=4, had=0.
#[test]
fn render_int_truncated() {
let Some(btf) = test_btf() else {
crate::report::test_skip("test_btf returned None");
return;
};
let Ok(ids) = btf.resolve_ids_by_name("int") else {
crate::report::test_skip("BTF missing 'int' type");
return;
};
let Some(&id) = ids.first() else {
crate::report::test_skip("BTF resolved 'int' to empty id list");
return;
};
let v = render_value(&btf, id, &[]);
assert!(matches!(
v,
RenderedValue::Truncated {
needed: 4,
had: 0,
..
}
));
}
// Rendering a `u32` typedef from only 2 bytes must yield Truncated with
// needed=4, had=2 (the typedef is peeled down to its 4-byte base int).
#[test]
fn render_truncated_unsigned_int() {
let Some(btf) = test_btf() else {
crate::report::test_skip("test_btf returned None");
return;
};
let Ok(ids) = btf.resolve_ids_by_name("u32") else {
crate::report::test_skip("BTF missing 'u32' typedef");
return;
};
let Some(&id) = ids.first() else {
crate::report::test_skip("BTF resolved 'u32' to empty id list");
return;
};
let v = render_value(&btf, id, &[0xff, 0xff]);
assert!(matches!(
v,
RenderedValue::Truncated {
needed: 4,
had: 2,
..
}
));
}
// Signed/unsigned integers display as plain decimal; bools as true/false.
#[test]
fn display_int_uint_bool() {
assert_eq!(
format!(
"{}",
RenderedValue::Int {
bits: 32,
value: -7
}
),
"-7"
);
assert_eq!(
format!(
"{}",
RenderedValue::Uint {
bits: 64,
value: 42
}
),
"42"
);
assert_eq!(format!("{}", RenderedValue::Bool { value: true }), "true");
assert_eq!(format!("{}", RenderedValue::Bool { value: false }), "false");
}
// Printable chars display quoted; non-printable (NUL, DEL, high-bit) bytes
// fall back to two-digit lowercase hex.
#[test]
fn display_char_printable_and_nonprintable() {
assert_eq!(format!("{}", RenderedValue::Char { value: b'A' }), "'A'");
assert_eq!(format!("{}", RenderedValue::Char { value: 0x00 }), "0x00");
assert_eq!(format!("{}", RenderedValue::Char { value: 0x7f }), "0x7f");
assert_eq!(format!("{}", RenderedValue::Char { value: 0xab }), "0xab");
}
// Floats display via the default float formatting (no forced precision).
#[test]
fn display_float() {
assert_eq!(
format!(
"{}",
RenderedValue::Float {
bits: 64,
value: 1.5
}
),
"1.5"
);
}
#[test]
fn display_enum_with_and_without_variant() {
    // A resolved variant renders as "NAME (value)".
    let named = RenderedValue::Enum {
        bits: 32,
        value: 1,
        variant: Some("RUNNING".into()),
    };
    assert_eq!(named.to_string(), "RUNNING (1)");
    // An unresolved value falls back to the bare number.
    let bare = RenderedValue::Enum {
        bits: 32,
        value: 99,
        variant: None,
    };
    assert_eq!(bare.to_string(), "99");
}
// Pointers display as bare lowercase hex with a `0x` prefix and no padding.
#[test]
fn display_ptr_is_lowercase_hex() {
    assert_eq!(
        format!(
            "{}",
            RenderedValue::Ptr {
                value: 0xffff_8000_1234_5678,
                deref: None,
                deref_skipped_reason: None,
                cast_annotation: None,
            }
        ),
        "0xffff800012345678"
    );
    // Null pointer is "0x0", not zero-padded to pointer width.
    assert_eq!(
        format!(
            "{}",
            RenderedValue::Ptr {
                value: 0,
                deref: None,
                deref_skipped_reason: None,
                cast_annotation: None,
            }
        ),
        "0x0"
    );
}
// Cpu lists display as `cpus={...}` (empty braces for an empty list).
// These assertions previously lived inside `display_ptr_is_lowercase_hex`,
// where a CpuList regression would be misattributed to pointer formatting.
#[test]
fn display_cpu_list_braced_format() {
    assert_eq!(
        format!(
            "{}",
            RenderedValue::CpuList {
                cpus: "0-7".to_string()
            }
        ),
        "cpus={0-7}"
    );
    assert_eq!(
        format!(
            "{}",
            RenderedValue::CpuList {
                cpus: String::new()
            }
        ),
        "cpus={}"
    );
}
#[test]
fn display_bytes_passes_through() {
    // A byte dump displays exactly as its pre-formatted hex string.
    let v = RenderedValue::Bytes {
        hex: "12 34 ab".into(),
    };
    assert_eq!(v.to_string(), "12 34 ab");
}
#[test]
fn display_unsupported_includes_reason() {
    // Unsupported values surface their reason inside an angle-bracket marker.
    let v = RenderedValue::Unsupported {
        reason: "void".into(),
    };
    assert_eq!(v.to_string(), "<unsupported: void>");
}
#[test]
fn display_truncated_with_bytes_partial() {
    // A truncated scalar leads with the needed/had byte counts, then shows
    // whatever bytes were actually captured.
    let v = RenderedValue::Truncated {
        needed: 4,
        had: 2,
        partial: Box::new(RenderedValue::Bytes {
            hex: "12 34".into(),
        }),
    };
    assert_eq!(v.to_string(), "<truncated needed=4 had=2> 12 34");
}
// A named struct renders inline as `name{field=value, ...}`.
#[test]
fn display_struct_with_named_members() {
let v = RenderedValue::Struct {
type_name: Some("task_ctx".into()),
members: vec![
RenderedMember {
name: "weight".into(),
value: RenderedValue::Uint {
bits: 32,
value: 1024,
},
},
RenderedMember {
name: "last_runnable_at".into(),
value: RenderedValue::Uint {
bits: 64,
value: 12_345_678_901_234,
},
},
],
};
assert_eq!(
format!("{v}"),
"task_ctx{weight=1024, last_runnable_at=12345678901234}"
);
}
// A struct without a type name renders as bare braces.
#[test]
fn display_struct_anonymous_uses_struct_brace() {
let v = RenderedValue::Struct {
type_name: None,
members: vec![RenderedMember {
name: "x".into(),
value: RenderedValue::Int { bits: 32, value: 7 },
}],
};
assert_eq!(format!("{v}"), "{x=7}");
}
// An empty struct stays on one line: `name{}`.
#[test]
fn display_empty_struct_is_one_line() {
let v = RenderedValue::Struct {
type_name: Some("empty".into()),
members: vec![],
};
assert_eq!(format!("{v}"), "empty{}");
}
// A member with an empty name displays with the `<anon>` placeholder.
#[test]
fn display_anonymous_member_uses_anon_marker() {
let v = RenderedValue::Struct {
type_name: Some("u".into()),
members: vec![RenderedMember {
name: String::new(),
value: RenderedValue::Uint { bits: 32, value: 5 },
}],
};
assert_eq!(format!("{v}"), "u{<anon>=5}");
}
// A small nested struct stays inline inside its parent's braces.
#[test]
fn display_nested_struct_renders_inline_when_small() {
let inner = RenderedValue::Struct {
type_name: Some("inner".into()),
members: vec![RenderedMember {
name: "a".into(),
value: RenderedValue::Uint { bits: 32, value: 1 },
}],
};
let outer = RenderedValue::Struct {
type_name: Some("outer".into()),
members: vec![RenderedMember {
name: "child".into(),
value: inner,
}],
};
assert_eq!(format!("{outer}"), "outer{child=inner{a=1}}");
}
// When the inline form would exceed the renderer's width budget, the struct
// must switch to a multi-line layout led by an `outer:` breadcrumb while
// still surfacing inner-member values.
#[test]
fn display_nested_struct_breaks_to_multiline_past_inline_budget() {
// 20 wide members guarantees the inline budget is blown.
let inner_members: Vec<RenderedMember> = (0..20)
.map(|i| RenderedMember {
name: format!("field_{i:02}"),
value: RenderedValue::Uint {
bits: 64,
value: 0xdeadbeef,
},
})
.collect();
let inner = RenderedValue::Struct {
type_name: Some("inner".into()),
members: inner_members,
};
let outer = RenderedValue::Struct {
type_name: Some("outer".into()),
members: vec![RenderedMember {
name: "child".into(),
value: inner,
}],
};
let rendered = format!("{outer}");
assert!(
rendered.contains('\n'),
"over-budget nested struct must break to multi-line; got: {rendered:?}",
);
assert!(
rendered.starts_with("outer:"),
"multi-line form must lead with `outer:` breadcrumb, got: {rendered:?}",
);
// 3735928559 == 0xdeadbeef in decimal.
assert!(
rendered.contains("3735928559"),
"inner-member values must still surface in multi-line form: {rendered:?}",
);
}
// Scalar arrays render inline as a bracketed hex list.
#[test]
fn display_array_scalars_inline() {
let v = RenderedValue::Array {
len: 3,
elements: vec![
RenderedValue::Uint { bits: 32, value: 1 },
RenderedValue::Uint { bits: 32, value: 2 },
RenderedValue::Uint { bits: 32, value: 3 },
],
};
assert_eq!(format!("{v}"), "[0x1, 0x2, 0x3]");
}
// An empty array is just `[]`.
#[test]
fn display_array_empty() {
let v = RenderedValue::Array {
len: 0,
elements: vec![],
};
assert_eq!(format!("{v}"), "[]");
}
// When fewer elements were decoded than the declared length, the display
// shows the decoded index range plus a "N of M shown" marker.
#[test]
fn display_array_truncated_marker() {
let v = RenderedValue::Array {
len: 5,
elements: vec![
RenderedValue::Uint { bits: 32, value: 1 },
RenderedValue::Uint { bits: 32, value: 2 },
],
};
assert_eq!(format!("{v}"), "[[0..1]={0x1, 0x2}] /* 2 of 5 shown */");
}
// Arrays of structs use the multi-line `[idx] value` block style.
#[test]
fn display_array_of_structs_block_style() {
let elem = RenderedValue::Struct {
type_name: Some("e".into()),
members: vec![RenderedMember {
name: "v".into(),
value: RenderedValue::Uint {
bits: 32,
value: 10,
},
}],
};
let v = RenderedValue::Array {
len: 1,
elements: vec![elem],
};
assert_eq!(format!("{v}"), "[\n [0] e{v=10}\n]");
}
// A Truncated wrapper around a partially-decoded struct must show the
// decoded members after the breadcrumb, and suppress inner members that
// decoded zero bytes (had=0) rather than echoing nested truncation noise.
#[test]
fn display_truncated_with_struct_partial_shows_decoded_members() {
let partial = RenderedValue::Struct {
type_name: Some("partial_struct".into()),
members: vec![
RenderedMember {
name: "a".into(),
value: RenderedValue::Uint { bits: 32, value: 7 },
},
RenderedMember {
name: "b".into(),
value: RenderedValue::Truncated {
needed: 4,
had: 0,
partial: Box::new(RenderedValue::Bytes { hex: "".into() }),
},
},
],
};
let v = RenderedValue::Truncated {
needed: 8,
had: 4,
partial: Box::new(partial),
};
let out = format!("{v}");
assert!(
out.starts_with("<truncated needed=8 had=4> partial_struct:"),
"expected breadcrumb form, got: {out}"
);
assert!(out.contains("a=7"));
assert!(
!out.contains("truncated needed=4 had=0"),
"had=0 truncated fields must be suppressed: {out}"
);
}
// A truncated integer must carry its captured bytes as a Bytes partial so
// the caller can still show what arrived.
#[test]
fn truncated_int_carries_bytes_partial() {
let Some(btf) = test_btf() else {
crate::report::test_skip("test_btf returned None");
return;
};
let Ok(ids) = btf.resolve_ids_by_name("u32") else {
crate::report::test_skip("BTF missing 'u32'");
return;
};
let Some(&id) = ids.first() else {
crate::report::test_skip("BTF resolved 'u32' to empty id list");
return;
};
let v = render_value(&btf, id, &[0x12, 0x34]);
match v {
RenderedValue::Truncated {
needed,
had,
partial,
} => {
assert_eq!(needed, 4);
assert_eq!(had, 2);
match *partial {
RenderedValue::Bytes { hex } => {
assert_eq!(hex, "12 34");
}
other => panic!("expected Bytes partial, got {other:?}"),
}
}
other => panic!("expected Truncated, got {other:?}"),
}
}
// A truncated struct must carry a Struct partial with whatever members fit
// in the available bytes (task_struct is far larger than 16 bytes).
#[test]
fn truncated_struct_carries_struct_partial_with_decoded_members() {
let Some(btf) = test_btf() else {
crate::report::test_skip("test_btf returned None");
return;
};
let Ok(ids) = btf.resolve_ids_by_name("task_struct") else {
crate::report::test_skip("BTF missing 'task_struct'");
return;
};
let Some(&id) = ids.first() else {
crate::report::test_skip("BTF resolved 'task_struct' to empty id list");
return;
};
let v = render_value(&btf, id, &[0u8; 16]);
match v {
RenderedValue::Truncated {
needed,
had,
partial,
} => {
assert!(needed > 16, "expected struct size > 16, got {needed}");
assert_eq!(had, 16);
match *partial {
RenderedValue::Struct { type_name, members } => {
assert_eq!(type_name.as_deref(), Some("task_struct"));
assert!(
!members.is_empty(),
"partial struct must carry SOME decoded members"
);
}
other => panic!("expected Struct partial, got {other:?}"),
}
}
other => panic!("expected Truncated, got {other:?}"),
}
}
// `cpumask` rendered from a single byte: either the result truncates — in
// which case the partial must be a Struct with some decoded members — or a
// one-byte cpumask decodes outright as a Struct.
#[test]
fn truncated_array_element_carries_bytes_partial() {
    let Some(btf) = test_btf() else {
        crate::report::test_skip("test_btf returned None");
        return;
    };
    let Ok(ids) = btf.resolve_ids_by_name("cpumask") else {
        crate::report::test_skip("BTF missing 'cpumask'");
        return;
    };
    let Some(&id) = ids.first() else {
        crate::report::test_skip("BTF resolved 'cpumask' to empty id list");
        return;
    };
    match render_value(&btf, id, &[0u8]) {
        RenderedValue::Truncated { partial, .. } => match *partial {
            RenderedValue::Struct { members, .. } => assert!(!members.is_empty()),
            other => panic!("expected Struct partial, got {other:?}"),
        },
        RenderedValue::Struct { .. } => {}
        other => panic!("expected Truncated or Struct, got {other:?}"),
    }
}
// Load the build-generated probe.o BTF and return it together with the type
// id of its `.bss` BTF_KIND_DATASEC. Panics (rather than skips) because
// build.rs is expected to always produce a parseable probe.o.
fn load_probe_btf_and_bss_id() -> (Btf, u32) {
let probe_obj = std::path::PathBuf::from(env!("OUT_DIR")).join("probe.o");
let btf = crate::monitor::btf_offsets::load_btf_from_path(&probe_obj).unwrap_or_else(|e| {
panic!(
"load_btf_from_path({}) failed: {e}. \
build.rs always produces probe.o; a missing or \
unparseable artifact means the build pipeline is \
broken.",
probe_obj.display()
)
});
let ids = btf
.resolve_ids_by_name(".bss")
.expect("probe BTF must carry a `.bss` BTF_KIND_DATASEC");
// Several kinds may share the `.bss` name; pick the Datasec specifically.
for &id in &ids {
if let Ok(Type::Datasec(_)) = btf.resolve_type_by_id(id) {
return (btf, id);
}
}
panic!("probe BTF has `.bss` ids {ids:?} but none resolve to BTF_KIND_DATASEC");
}
// Rendering a Datasec with a full-size buffer must produce a Struct whose
// type_name is the section name and whose members are the section's named
// variables — none Unsupported, none Truncated.
#[test]
fn render_datasec_emits_struct_with_named_variables() {
let (btf, bss_id) = load_probe_btf_and_bss_id();
let Type::Datasec(ds) = btf.resolve_type_by_id(bss_id).unwrap() else {
panic!(".bss id did not resolve to Datasec");
};
// Size the buffer to cover the furthest-reaching variable.
let section_size = ds
.variables
.iter()
.map(|v| v.offset() as usize + v.size())
.max()
.expect("`.bss` Datasec must have at least one variable");
let bytes = vec![0u8; section_size];
let rendered = render_value(&btf, bss_id, &bytes);
let RenderedValue::Struct { type_name, members } = rendered else {
panic!(
"expected RenderedValue::Struct for Datasec, got something else \
— Datasec dispatch in render_value_inner must be reachable"
);
};
assert_eq!(
type_name.as_deref(),
Some(".bss"),
"section name must surface as type_name"
);
let names: std::collections::HashSet<&str> = members.iter().map(|m| m.name.as_str()).collect();
assert!(
names.contains("ktstr_err_exit_detected"),
"rendered .bss must contain `ktstr_err_exit_detected` \
(the freeze latch). Found names: {names:?}"
);
for required in [
"ktstr_pcpu_counters",
"ktstr_last_trigger_ts",
"ktstr_exit_event_stats",
] {
assert!(
names.contains(required),
"rendered .bss must contain `{required}` \
diagnostic counter. Found names: {names:?}"
);
}
// Every member must decode cleanly against a full-size zeroed buffer.
for m in &members {
assert!(
!matches!(m.value, RenderedValue::Unsupported { .. }),
"member {:?} rendered as Unsupported: {:?}",
m.name,
m.value
);
assert!(
!matches!(m.value, RenderedValue::Truncated { .. }),
"member {:?} rendered as Truncated despite section_size \
buffer: {:?}",
m.name,
m.value
);
}
// Spot-check one member's decoded value: the latch is a zeroed u32.
let latch = members
.iter()
.find(|m| m.name == "ktstr_err_exit_detected")
.expect("latch member must be present (asserted above)");
match &latch.value {
RenderedValue::Uint { bits, value } => {
assert_eq!(*bits, 32, "latch is u32 (32 bits)");
assert_eq!(*value, 0, "latch was zeroed in the buffer");
}
other => panic!("expected Uint{{32,0}} for latch, got {other:?}"),
}
}
// With a buffer just big enough for the lowest-offset variable, that one
// must decode and (if other variables exist) at least one must be Truncated.
#[test]
fn render_datasec_truncates_overrunning_variables() {
let (btf, bss_id) = load_probe_btf_and_bss_id();
let Type::Datasec(ds) = btf.resolve_type_by_id(bss_id).unwrap() else {
panic!(".bss id did not resolve to Datasec");
};
let min_var = ds
.variables
.iter()
.min_by_key(|v| v.offset())
.expect("`.bss` must have at least one variable");
// Buffer covers exactly through the end of the lowest-offset variable.
let buf_size = (min_var.offset() as usize) + min_var.size();
let bytes = vec![0u8; buf_size];
let rendered = render_value(&btf, bss_id, &bytes);
let RenderedValue::Struct { type_name, members } = rendered else {
panic!("expected RenderedValue::Struct even with short buffer");
};
assert_eq!(type_name.as_deref(), Some(".bss"));
let truncated_count = members
.iter()
.filter(|m| matches!(m.value, RenderedValue::Truncated { .. }))
.count();
let decoded_count = members.len() - truncated_count;
assert!(
decoded_count >= 1,
"at least one member must decode (the variable at the smallest offset, \
which fits in buf_size={buf_size})"
);
if members.len() > 1 {
assert!(
truncated_count >= 1,
"multi-variable .bss with short buffer must produce >= 1 \
Truncated member; got 0 from {members:?}"
);
}
}
// An empty buffer must still yield a Struct shell, with every member marked
// Truncated{had:0} rather than dropped.
#[test]
fn render_datasec_empty_buffer_yields_struct_with_truncated_members() {
let (btf, bss_id) = load_probe_btf_and_bss_id();
let rendered = render_value(&btf, bss_id, &[]);
let RenderedValue::Struct { members, .. } = rendered else {
panic!("expected Struct render even with empty buffer");
};
assert!(!members.is_empty(), "probe `.bss` Datasec is non-empty");
for m in &members {
assert!(
matches!(m.value, RenderedValue::Truncated { had: 0, .. }),
"member {:?} should be Truncated{{had:0}} for empty buffer, got {:?}",
m.name,
m.value
);
}
}
// format_cpu_list: no cpus → empty string.
#[test]
fn format_cpu_list_empty_is_empty_string() {
assert_eq!(format_cpu_list(&[]), "");
}
// A lone cpu renders as its bare number.
#[test]
fn format_cpu_list_single_element() {
assert_eq!(format_cpu_list(&[5]), "5");
}
// A contiguous run collapses to "first-last".
#[test]
fn format_cpu_list_contiguous_range() {
assert_eq!(format_cpu_list(&[0, 1, 2, 3, 4]), "0-4");
}
// Even a two-element run uses range notation, not "0,1".
#[test]
fn format_cpu_list_two_consecutive_collapses_to_range() {
assert_eq!(format_cpu_list(&[0, 1]), "0-1");
}
// Mixed ranges and singletons are comma-separated.
#[test]
fn format_cpu_list_gaps_between_ranges() {
assert_eq!(format_cpu_list(&[0, 1, 2, 5, 7, 8, 9]), "0-2,5,7-9");
}
// All-isolated cpus stay as a comma list.
#[test]
fn format_cpu_list_all_singletons() {
assert_eq!(format_cpu_list(&[0, 2, 4, 6]), "0,2,4,6");
}
// Range followed by a trailing singleton.
#[test]
fn format_cpu_list_first_range_then_singleton() {
assert_eq!(format_cpu_list(&[0, 1, 5]), "0-1,5");
}
// Singleton followed by a trailing range (exercises the final flush path).
#[test]
fn format_cpu_list_singleton_then_trailing_range() {
assert_eq!(format_cpu_list(&[0, 3, 4, 5]), "0,3-5");
}
// Fewer than 8 bytes (one u64 word) is not a plausible cpumask → None.
#[test]
fn try_render_cpumask_bits_too_short_returns_none() {
assert!(try_render_cpumask_bits(&[], u32::MAX).is_none());
assert!(try_render_cpumask_bits(&[0u8; 1], u32::MAX).is_none());
assert!(try_render_cpumask_bits(&[0u8; 7], u32::MAX).is_none());
}
// An all-zero word is a valid mask that renders as the empty cpu list.
#[test]
fn try_render_cpumask_bits_all_zero_yields_empty_list() {
let v = try_render_cpumask_bits(&[0u8; 8], u32::MAX);
match v {
Some(RenderedValue::CpuList { cpus }) => {
assert_eq!(cpus, "", "all-zero bytes must produce empty cpu list");
}
other => panic!("expected Some(CpuList), got {other:?}"),
}
}
// Low bits of the first little-endian word map to cpus 0..n.
#[test]
fn try_render_cpumask_bits_single_word_low_bits() {
let bits: u64 = 0b111;
let bytes = bits.to_le_bytes();
let v = try_render_cpumask_bits(&bytes, u32::MAX);
match v {
Some(RenderedValue::CpuList { cpus }) => assert_eq!(cpus, "0-2"),
other => panic!("expected CpuList with 0-2, got {other:?}"),
}
}
// The top bit of the first word is cpu 63.
#[test]
fn try_render_cpumask_bits_single_word_high_bit() {
let bits: u64 = 1u64 << 63;
let bytes = bits.to_le_bytes();
let v = try_render_cpumask_bits(&bytes, u32::MAX);
match v {
Some(RenderedValue::CpuList { cpus }) => assert_eq!(cpus, "63"),
other => panic!("expected CpuList with 63, got {other:?}"),
}
}
// All 64 bits set, but max_cpus=8 must clip the rendered list at cpu 7.
#[test]
fn try_render_cpumask_bits_caps_at_nr_cpu_ids() {
    let bytes = u64::MAX.to_le_bytes();
    match try_render_cpumask_bits(&bytes, 8) {
        Some(RenderedValue::CpuList { cpus }) => {
            assert_eq!(cpus, "0-7", "max_cpus=8 must cap at cpu 7, got {cpus}");
        }
        other => panic!("expected CpuList with 0-7, got {other:?}"),
    }
}
// The cap applies whether it lands inside the first word (8) or exactly at
// the first word boundary (64); bits past the cap never surface.
#[test]
fn try_render_cpumask_bits_caps_across_word_boundary() {
let mut bytes = [0u8; 16];
bytes[0..8].copy_from_slice(&u64::MAX.to_le_bytes());
bytes[8..16].copy_from_slice(&u64::MAX.to_le_bytes());
let v = try_render_cpumask_bits(&bytes, 8);
match v {
Some(RenderedValue::CpuList { cpus }) => assert_eq!(cpus, "0-7"),
other => panic!("expected CpuList 0-7, got {other:?}"),
}
let v = try_render_cpumask_bits(&bytes, 64);
match v {
Some(RenderedValue::CpuList { cpus }) => assert_eq!(cpus, "0-63"),
other => panic!("expected CpuList 0-63, got {other:?}"),
}
}
// Bits in the second word are offset by 64 cpus.
#[test]
fn try_render_cpumask_bits_multi_word_offsets() {
let mut bytes = [0u8; 16];
bytes[0..8].copy_from_slice(&1u64.to_le_bytes());
let w1: u64 = 0b11;
bytes[8..16].copy_from_slice(&w1.to_le_bytes());
let v = try_render_cpumask_bits(&bytes, u32::MAX);
match v {
Some(RenderedValue::CpuList { cpus }) => assert_eq!(cpus, "0,64-65"),
other => panic!("expected CpuList with 0,64-65, got {other:?}"),
}
}
// Trailing bytes that don't fill a whole u64 word are ignored.
#[test]
fn try_render_cpumask_bits_partial_trailing_bytes_ignored() {
let mut bytes = [0u8; 12];
bytes[0..8].copy_from_slice(&1u64.to_le_bytes());
bytes[8] = 0xff;
let v = try_render_cpumask_bits(&bytes, u32::MAX);
match v {
Some(RenderedValue::CpuList { cpus }) => assert_eq!(cpus, "0"),
other => panic!("expected CpuList with 0, got {other:?}"),
}
}
// A tiny cap clips even a 1024-bit all-ones buffer.
#[test]
fn try_render_cpumask_bits_garbage_capped_at_max_cpus() {
let bytes = vec![0xFFu8; 128];
let v = try_render_cpumask_bits(&bytes, 4);
match v {
Some(RenderedValue::CpuList { cpus }) => {
assert_eq!(
cpus, "0-3",
"max_cpus=4 must clip 1024-bit garbage to cpus 0-3, got: {cpus}",
);
}
other => panic!("expected CpuList 0-3, got {other:?}"),
}
}
// max_cpus=0 yields an empty list, not None and not a crash.
#[test]
fn try_render_cpumask_bits_max_cpus_zero_yields_empty_list() {
let bits: u64 = 0xFFFF_FFFF_FFFF_FFFF;
let bytes = bits.to_le_bytes();
let v = try_render_cpumask_bits(&bytes, 0);
match v {
Some(RenderedValue::CpuList { cpus }) => {
assert_eq!(cpus, "", "max_cpus=0 must produce empty list, got: {cpus}");
}
other => panic!("expected empty CpuList, got {other:?}"),
}
}
// A cap equal to the word width keeps every bit (no off-by-one clipping).
#[test]
fn try_render_cpumask_bits_max_cpus_matches_word_width_keeps_all_bits() {
let bits: u64 = u64::MAX;
let bytes = bits.to_le_bytes();
let v = try_render_cpumask_bits(&bytes, 64);
match v {
Some(RenderedValue::CpuList { cpus }) => {
assert_eq!(
cpus, "0-63",
"max_cpus=64 must surface all 64 bits, got: {cpus}",
);
}
other => panic!("expected CpuList 0-63, got {other:?}"),
}
}
// MemReader's provided default for nr_cpu_ids must be u32::MAX (i.e. "no
// cap") when an implementor only supplies read_kva.
#[test]
fn mem_reader_default_nr_cpu_ids_is_u32_max() {
struct DefaultReader;
impl MemReader for DefaultReader {
fn read_kva(&self, _: u64, _: usize) -> Option<Vec<u8>> {
None
}
}
let r = DefaultReader;
assert_eq!(
r.nr_cpu_ids(),
u32::MAX,
"default nr_cpu_ids must be u32::MAX",
);
}
// An implementor overriding nr_cpu_ids must see its own value used.
#[test]
fn mem_reader_custom_nr_cpu_ids_returns_overridden_value() {
struct CustomReader {
cpu_count: u32,
}
impl MemReader for CustomReader {
fn read_kva(&self, _: u64, _: usize) -> Option<Vec<u8>> {
None
}
fn nr_cpu_ids(&self) -> u32 {
self.cpu_count
}
}
let r = CustomReader { cpu_count: 16 };
assert_eq!(r.nr_cpu_ids(), 16);
}
// render_value_with_mem must propagate the reader's nr_cpu_ids down to
// cpumask rendering: an all-ones cpumask with an 8-cpu reader caps at 0-7.
#[test]
fn render_value_with_mem_caps_cpumask_at_reader_nr_cpu_ids() {
let Some(btf) = test_btf() else {
crate::report::test_skip("test_btf returned None");
return;
};
let Ok(ids) = btf.resolve_ids_by_name("cpumask") else {
crate::report::test_skip("BTF missing 'cpumask' struct");
return;
};
let Some(&id) = ids.first() else {
crate::report::test_skip("BTF resolved 'cpumask' to empty id list");
return;
};
let Some(ty) = peel_modifiers(&btf, id) else {
crate::report::test_skip("could not peel cpumask modifiers");
return;
};
// Build an all-ones buffer of the type's full size (needs >= one word).
let size = match type_size(&btf, &ty) {
Some(n) if n >= 8 => n,
_ => {
crate::report::test_skip("cpumask size unresolved or < 8");
return;
}
};
let bytes = vec![0xFFu8; size];
struct EightCpuReader;
impl MemReader for EightCpuReader {
fn read_kva(&self, _: u64, _: usize) -> Option<Vec<u8>> {
None
}
fn nr_cpu_ids(&self) -> u32 {
8
}
}
let reader = EightCpuReader;
let v = render_value_with_mem(&btf, id, &bytes, &reader);
match v {
RenderedValue::CpuList { cpus } => {
assert_eq!(
cpus, "0-7",
"render_struct must propagate reader.nr_cpu_ids=8 to cpu-list \
rendering; got: {cpus}",
);
}
other => panic!("expected CpuList from cpumask render, got {other:?}"),
}
}
// Without a reader, render_value must fall back to the u32::MAX cap and
// surface all 64 bits of an all-ones word.
#[test]
fn render_value_without_mem_uses_u32_max_cap() {
let Some(btf) = test_btf() else {
crate::report::test_skip("test_btf returned None");
return;
};
let Ok(ids) = btf.resolve_ids_by_name("cpumask") else {
crate::report::test_skip("BTF missing 'cpumask' struct");
return;
};
let Some(&id) = ids.first() else {
crate::report::test_skip("BTF resolved 'cpumask' to empty id list");
return;
};
let bytes = [0xFFu8; 8];
let v = render_value(&btf, id, &bytes);
match v {
RenderedValue::CpuList { cpus } => {
assert_eq!(
cpus, "0-63",
"no-reader cpumask must use u32::MAX cap (all 64 bits), got: {cpus}",
);
}
other => panic!("expected CpuList, got {other:?}"),
}
}
// `is_text_byte` gates which bytes count as C-string text: NUL (terminator),
// newline, and printable ASCII 0x20..=0x7e. Everything else — tab, CR, other
// control characters, DEL, and high-bit bytes — is rejected.
#[test]
fn is_text_byte_accepts_nul() {
    assert!(is_text_byte(0x00), "NUL is the C string terminator");
}
#[test]
fn is_text_byte_accepts_newline() {
    assert!(is_text_byte(b'\n'));
}
#[test]
fn is_text_byte_rejects_tab_and_cr() {
    assert!(!is_text_byte(b'\t'));
    assert!(!is_text_byte(b'\r'));
}
#[test]
fn is_text_byte_accepts_printable_ascii() {
    // 0x20 (space) and 0x7e ('~') bracket the printable range.
    assert!(is_text_byte(0x20));
    assert!(is_text_byte(b'A'));
    assert!(is_text_byte(0x7e));
}
#[test]
fn is_text_byte_rejects_other_control_chars() {
    assert!(!is_text_byte(0x01));
    assert!(!is_text_byte(0x07));
    assert!(!is_text_byte(0x1f));
}
#[test]
fn is_text_byte_rejects_high_bit_bytes() {
    // 0x7f is DEL; >= 0x80 is outside ASCII.
    assert!(!is_text_byte(0x7f));
    assert!(!is_text_byte(0x80));
    assert!(!is_text_byte(0xff));
}
// An array of 8-bit signed ints whose bytes are all text qualifies as a
// string (newline and NUL are allowed text bytes).
#[test]
fn is_string_value_accepts_8bit_int_array() {
let v = RenderedValue::Array {
len: 4,
elements: vec![
RenderedValue::Int {
bits: 8,
value: b'h' as i64,
},
RenderedValue::Int {
bits: 8,
value: b'i' as i64,
},
RenderedValue::Int {
bits: 8,
value: b'\n' as i64,
},
RenderedValue::Int { bits: 8, value: 0 },
],
};
assert!(is_string_value(&v));
}
// 8-bit unsigned arrays qualify the same way.
#[test]
fn is_string_value_accepts_8bit_uint_array() {
let v = RenderedValue::Array {
len: 2,
elements: vec![
RenderedValue::Uint {
bits: 8,
value: b'a' as u64,
},
RenderedValue::Uint {
bits: 8,
value: b'b' as u64,
},
],
};
assert!(is_string_value(&v));
}
// Char arrays qualify too (NUL terminator included).
#[test]
fn is_string_value_accepts_char_array() {
let v = RenderedValue::Array {
len: 2,
elements: vec![
RenderedValue::Char { value: b'X' },
RenderedValue::Char { value: 0 },
],
};
assert!(is_string_value(&v));
}
// A single-element array is too short to count as a string.
#[test]
fn is_string_value_rejects_too_short_array() {
let v = RenderedValue::Array {
len: 1,
elements: vec![RenderedValue::Char { value: b'X' }],
};
assert!(!is_string_value(&v));
}
// Any non-text byte (here 0x80) disqualifies the whole array.
#[test]
fn is_string_value_rejects_non_text_byte() {
let v = RenderedValue::Array {
len: 2,
elements: vec![
RenderedValue::Uint {
bits: 8,
value: b'a' as u64,
},
RenderedValue::Uint {
bits: 8,
value: 0x80,
},
],
};
assert!(!is_string_value(&v));
}
// Elements wider than 8 bits are not characters.
#[test]
fn is_string_value_rejects_wider_int() {
let v = RenderedValue::Array {
len: 2,
elements: vec![
RenderedValue::Uint {
bits: 32,
value: b'a' as u64,
},
RenderedValue::Uint {
bits: 32,
value: b'b' as u64,
},
],
};
assert!(!is_string_value(&v));
}
// Non-array values never qualify, regardless of content.
#[test]
fn is_string_value_rejects_non_array() {
assert!(!is_string_value(&RenderedValue::Bytes { hex: "00".into() }));
assert!(!is_string_value(&RenderedValue::Uint {
bits: 8,
value: b'a' as u64
}));
assert!(!is_string_value(&RenderedValue::Struct {
type_name: None,
members: vec![],
}));
}
// is_zero: integer-like scalars are zero exactly when their value is 0
// (false for Bool, NUL for Char).
#[test]
fn is_zero_int_uint_bool_char() {
assert!(is_zero(&RenderedValue::Int { bits: 32, value: 0 }));
assert!(!is_zero(&RenderedValue::Int {
bits: 32,
value: -1
}));
assert!(is_zero(&RenderedValue::Uint { bits: 64, value: 0 }));
assert!(!is_zero(&RenderedValue::Uint { bits: 64, value: 1 }));
assert!(is_zero(&RenderedValue::Bool { value: false }));
assert!(!is_zero(&RenderedValue::Bool { value: true }));
assert!(is_zero(&RenderedValue::Char { value: 0 }));
assert!(!is_zero(&RenderedValue::Char { value: b'a' }));
}
// Floats: both +0.0 and -0.0 count as zero.
#[test]
fn is_zero_float() {
assert!(is_zero(&RenderedValue::Float {
bits: 64,
value: 0.0
}));
assert!(!is_zero(&RenderedValue::Float {
bits: 64,
value: 1.0
}));
assert!(is_zero(&RenderedValue::Float {
bits: 64,
value: -0.0
}));
}
// Enums are zero by numeric value, regardless of variant resolution.
#[test]
fn is_zero_enum() {
assert!(is_zero(&RenderedValue::Enum {
bits: 32,
value: 0,
variant: None
}));
assert!(!is_zero(&RenderedValue::Enum {
bits: 32,
value: 1,
variant: Some("RUNNING".into())
}));
}
// Cpu lists are zero exactly when the list string is empty.
#[test]
fn is_zero_cpulist_empty_vs_populated() {
assert!(
is_zero(&RenderedValue::CpuList {
cpus: String::new()
}),
"empty cpu list is zero"
);
assert!(
!is_zero(&RenderedValue::CpuList { cpus: "0-7".into() }),
"populated cpu list is non-zero"
);
}
// Pointers are zero by address only; an attached deref doesn't matter.
#[test]
fn is_zero_ptr() {
assert!(is_zero(&RenderedValue::Ptr {
value: 0,
deref: None,
deref_skipped_reason: None,
cast_annotation: None,
}));
assert!(!is_zero(&RenderedValue::Ptr {
value: 0xffff_8000_dead_beef,
deref: None,
deref_skipped_reason: None,
cast_annotation: None,
}));
assert!(is_zero(&RenderedValue::Ptr {
value: 0,
deref: Some(Box::new(RenderedValue::Uint { bits: 32, value: 5 })),
deref_skipped_reason: None,
cast_annotation: None,
}));
}
// Compound values (Struct/Array/Bytes/Unsupported) are never "zero",
// even when empty.
#[test]
fn is_zero_compound_always_false() {
assert!(!is_zero(&RenderedValue::Struct {
type_name: None,
members: vec![],
}));
assert!(!is_zero(&RenderedValue::Array {
len: 0,
elements: vec![],
}));
assert!(!is_zero(&RenderedValue::Bytes { hex: "".into() }));
assert!(!is_zero(&RenderedValue::Unsupported { reason: "x".into() }));
}
// Scalars — including deref-less pointers, Bytes, and Unsupported — may be
// rendered inline on one line.
#[test]
fn is_inline_scalar_accepts_scalars() {
assert!(is_inline_scalar(&RenderedValue::Int { bits: 32, value: 0 }));
assert!(is_inline_scalar(&RenderedValue::Uint {
bits: 64,
value: 1
}));
assert!(is_inline_scalar(&RenderedValue::Bool { value: false }));
assert!(is_inline_scalar(&RenderedValue::Char { value: b'x' }));
assert!(is_inline_scalar(&RenderedValue::Float {
bits: 64,
value: 0.0
}));
assert!(is_inline_scalar(&RenderedValue::Enum {
bits: 32,
value: 0,
variant: None,
}));
assert!(is_inline_scalar(&RenderedValue::Ptr {
value: 0,
deref: None,
deref_skipped_reason: None,
cast_annotation: None,
}));
assert!(is_inline_scalar(&RenderedValue::Bytes { hex: "00".into() }));
assert!(is_inline_scalar(&RenderedValue::Unsupported {
reason: "void".into(),
}));
}
// Composites, cpu lists, truncated values, and pointers that carry a deref
// are not inline scalars (they may need multi-line layout).
#[test]
fn is_inline_scalar_rejects_composites() {
assert!(!is_inline_scalar(&RenderedValue::Struct {
type_name: None,
members: vec![],
}));
assert!(!is_inline_scalar(&RenderedValue::Array {
len: 0,
elements: vec![],
}));
assert!(!is_inline_scalar(&RenderedValue::CpuList {
cpus: "0".into(),
}));
assert!(!is_inline_scalar(&RenderedValue::Truncated {
needed: 4,
had: 0,
partial: Box::new(RenderedValue::Bytes { hex: "".into() }),
}));
assert!(!is_inline_scalar(&RenderedValue::Ptr {
value: 0x1000,
deref: Some(Box::new(RenderedValue::Struct {
type_name: Some("scx_cgroup_llc_ctx".into()),
members: vec![],
})),
deref_skipped_reason: None,
cast_annotation: None,
}));
}
// Regression guard: a struct whose fields came from flattening must still
// surface every field name and value rather than rendering as empty braces.
#[test]
fn struct_with_only_anonymous_members_renders_inner_fields() {
let v = RenderedValue::Struct {
type_name: Some("scx_cgroup_ctx".into()),
members: vec![
RenderedMember {
name: "id".into(),
value: RenderedValue::Uint {
bits: 64,
value: 49,
},
},
RenderedMember {
name: "quota".into(),
value: RenderedValue::Uint {
bits: 64,
value: 5000000,
},
},
RenderedMember {
name: "is_throttled".into(),
value: RenderedValue::Bool { value: true },
},
RenderedMember {
name: "nr_throttled".into(),
value: RenderedValue::Uint {
bits: 32,
value: 100,
},
},
],
};
let rendered = format!("{v}");
assert!(
!rendered.ends_with("{}"),
"flattened struct must not render as empty: {rendered}"
);
assert!(
rendered.contains("id"),
"field 'id' must be visible: {rendered}"
);
assert!(
rendered.contains("49"),
"field value must be visible: {rendered}"
);
assert!(
rendered.contains("is_throttled"),
"field 'is_throttled' must be visible: {rendered}"
);
}
// Anonymous (empty-named) members holding anonymous inner structs — the
// C anonymous-struct/union shape — must flatten on display so the inner
// fields are visible.
#[test]
fn struct_with_nested_anonymous_members_flattens_on_display() {
let v = RenderedValue::Struct {
type_name: Some("scx_cgroup_ctx".into()),
members: vec![
RenderedMember {
name: String::new(),
value: RenderedValue::Struct {
type_name: None,
members: vec![
RenderedMember {
name: "id".into(),
value: RenderedValue::Uint {
bits: 64,
value: 49,
},
},
RenderedMember {
name: "quota".into(),
value: RenderedValue::Uint {
bits: 64,
value: 5000000,
},
},
],
},
},
RenderedMember {
name: String::new(),
value: RenderedValue::Struct {
type_name: None,
members: vec![RenderedMember {
name: "is_throttled".into(),
value: RenderedValue::Bool { value: true },
}],
},
},
],
};
let rendered = format!("{v}");
assert!(
!rendered.ends_with("{}"),
"nested anonymous struct must not render as empty: {rendered}"
);
assert!(
rendered.contains("id"),
"inner field 'id' must be visible: {rendered}"
);
assert!(
rendered.contains("is_throttled"),
"inner field 'is_throttled' must be visible: {rendered}"
);
}
// A pointer with a scalar deref renders "addr → value".
#[test]
fn display_ptr_with_scalar_deref_uses_arrow() {
let v = RenderedValue::Ptr {
value: 0xffff_8000_1234_5678,
deref: Some(Box::new(RenderedValue::Uint {
bits: 32,
value: 42,
})),
deref_skipped_reason: None,
cast_annotation: None,
};
let out = format!("{v}");
assert!(
out.contains(" → "),
"Display must include arrow separator: {out}"
);
assert!(out.starts_with("0xffff800012345678"));
assert!(out.ends_with("42"));
}
// A cpu-list deref also renders inline after the arrow.
#[test]
fn display_ptr_with_cpulist_deref_renders_inline() {
let v = RenderedValue::Ptr {
value: 0xffff_8888_aaaa_bbbb,
deref: Some(Box::new(RenderedValue::CpuList { cpus: "0-3".into() })),
deref_skipped_reason: None,
cast_annotation: None,
};
assert_eq!(format!("{v}"), "0xffff8888aaaabbbb → cpus={0-3}");
}
// A struct deref follows the arrow with the struct's own rendering.
#[test]
fn display_ptr_with_struct_deref_indents_correctly() {
let inner = RenderedValue::Struct {
type_name: Some("inner".into()),
members: vec![RenderedMember {
name: "v".into(),
value: RenderedValue::Uint { bits: 32, value: 7 },
}],
};
let v = RenderedValue::Ptr {
value: 0xdead_beef,
deref: Some(Box::new(inner)),
deref_skipped_reason: None,
cast_annotation: None,
};
let out = format!("{v}");
assert!(out.contains("0xdeadbeef → inner{"));
assert!(out.contains("v=7"));
}
// No deref → bare hex, no arrow.
#[test]
fn display_ptr_without_deref_no_arrow() {
let v = RenderedValue::Ptr {
value: 0xff,
deref: None,
deref_skipped_reason: None,
cast_annotation: None,
};
let out = format!("{v}");
assert!(
!out.contains("→"),
"no-deref Ptr must not have arrow: {out}"
);
assert_eq!(out, "0xff");
}
// A deref that was skipped surfaces its reason as "[chase: ...]" after the
// address, and must not emit the arrow (nothing was actually dereferenced).
#[test]
fn display_ptr_with_skip_reason_surfaces_inline() {
let v = RenderedValue::Ptr {
value: 0x7fff_aaaa_0000,
deref: None,
deref_skipped_reason: Some(
"arena read failed (cross-page boundary or unmapped page)".to_string(),
),
cast_annotation: None,
};
let out = format!("{v}");
assert!(
out.contains("[chase: arena read failed"),
"skip reason must be surfaced in [chase: ...] form: {out}"
);
assert!(
out.starts_with("0x7fffaaaa0000"),
"pointer hex must come first: {out}"
);
assert!(
!out.contains("→"),
"skip reason render must NOT emit arrow (no actual deref): {out}"
);
}
#[test]
fn array_of_3_similar_structs_uses_template_block() {
    // Three structs differing in exactly one field should collapse into a
    // template block: shared fields printed once, the varying field listed
    // per index.
    let make_elem = |varying: u64| {
        let common = RenderedMember {
            name: "common".into(),
            value: RenderedValue::Uint {
                bits: 32,
                value: 100,
            },
        };
        let x = RenderedMember {
            name: "x".into(),
            value: RenderedValue::Uint {
                bits: 32,
                value: varying,
            },
        };
        RenderedValue::Struct {
            type_name: Some("s".into()),
            members: vec![common, x],
        }
    };
    let v = RenderedValue::Array {
        len: 3,
        elements: (1..=3).map(make_elem).collect(),
    };
    let out = format!("{v}");
    assert!(
        out.contains("[0-2] s:"),
        "must surface template index range header: {out}"
    );
    assert!(out.contains("common=100"), "common field once: {out}");
    assert!(out.contains("x: "), "varying field name present: {out}");
    assert!(out.contains("[0]="), "per-index marker for first: {out}");
    assert!(out.contains("[2]="), "per-index marker for last: {out}");
}
#[test]
fn array_of_2_similar_structs_renders_per_element() {
    // Template grouping needs at least three elements; two structs must
    // render individually, each with its own [i] marker.
    let make_elem = |value: u64| RenderedValue::Struct {
        type_name: Some("s".into()),
        members: vec![RenderedMember {
            name: "x".into(),
            value: RenderedValue::Uint { bits: 32, value },
        }],
    };
    let v = RenderedValue::Array {
        len: 2,
        elements: (1..=2).map(make_elem).collect(),
    };
    let out = format!("{v}");
    assert!(
        !out.contains("[0-1]"),
        "two-element array must not use template: {out}"
    );
    assert!(out.contains("[0] s{"), "missing [0]: {out}");
    assert!(out.contains("[1] s{"), "missing [1]: {out}");
    assert!(out.contains("x=1"), "missing x=1: {out}");
    assert!(out.contains("x=2"), "missing x=2: {out}");
}
#[test]
fn array_with_too_many_varying_fields_falls_back() {
    // When more fields vary across elements than the template form allows
    // (here five), the renderer must fall back to per-element output.
    let make_elem = |n: u64| {
        let members = ["a", "b", "c", "d", "e"]
            .iter()
            .map(|&name| RenderedMember {
                name: name.into(),
                value: RenderedValue::Uint { bits: 32, value: n },
            })
            .collect();
        RenderedValue::Struct {
            type_name: Some("s".into()),
            members,
        }
    };
    let v = RenderedValue::Array {
        len: 3,
        elements: (1..=3).map(make_elem).collect(),
    };
    let out = format!("{v}");
    assert!(
        !out.contains("[0-2]"),
        ">3 varying fields must skip template, falls back to per-element: {out}",
    );
    assert!(out.contains("[0] s{"), "missing [0]: {out}");
    assert!(out.contains("[1] s{"), "missing [1]: {out}");
    assert!(out.contains("[2] s{"), "missing [2]: {out}");
}
#[test]
fn array_of_identical_structs_groups_via_run() {
    // Identical elements collapse into a single [first-last] run.
    let elem = RenderedValue::Struct {
        type_name: Some("s".into()),
        members: vec![RenderedMember {
            name: "x".into(),
            value: RenderedValue::Uint { bits: 32, value: 5 },
        }],
    };
    let v = RenderedValue::Array {
        len: 3,
        elements: vec![elem.clone(), elem.clone(), elem],
    };
    let out = format!("{v}");
    assert!(out.contains("[0-2] s{"), "must group identical: {out}");
}
#[test]
fn array_inline_sparse_runs() {
    // Zero entries are elided from inline arrays; only the non-zero ones
    // appear, each tagged with its index.
    let uint = |value: u64| RenderedValue::Uint { bits: 32, value };
    let v = RenderedValue::Array {
        len: 5,
        elements: vec![uint(0), uint(1), uint(0), uint(0), uint(2)],
    };
    assert_eq!(format!("{v}"), "[[1]=0x1 [4]=0x2]");
}
#[test]
fn array_inline_all_zero_collapses() {
    // An inline array whose every element is zero collapses to a single
    // summary token.
    let zero = || RenderedValue::Uint { bits: 32, value: 0 };
    let v = RenderedValue::Array {
        len: 3,
        elements: vec![zero(), zero(), zero()],
    };
    assert_eq!(format!("{v}"), "[all 3 zero]");
}
#[test]
fn array_block_all_zero_collapses() {
    // Arrays of block-rendered elements (here: null Ptrs) must also
    // collapse when every element is zero, just like inline arrays do.
    let null_ptr = || RenderedValue::Ptr {
        value: 0,
        deref: None,
        deref_skipped_reason: None,
        cast_annotation: None,
    };
    let v = RenderedValue::Array {
        len: 2,
        elements: vec![null_ptr(), null_ptr()],
    };
    let out = format!("{v}");
    // Fixed: message previously claimed "inline all-zero collapse", but
    // this test covers the block-element (Ptr) path.
    assert!(
        out.contains("all 2 zero"),
        "block all-zero collapse: {out}"
    );
}
#[test]
fn struct_zero_field_suppression_drops_silently() {
    // Zero-valued members are dropped from the output silently — no
    // "(N fields zero)" summary is emitted.
    let field = |name: &str, value: u64| RenderedMember {
        name: name.into(),
        value: RenderedValue::Uint { bits: 32, value },
    };
    let v = RenderedValue::Struct {
        type_name: Some("s".into()),
        members: vec![field("shown", 5), field("zero1", 0), field("zero2", 0)],
    };
    let out = format!("{v}");
    assert!(out.contains("shown=5"), "non-zero field shown: {out}");
    assert!(!out.contains("zero1"), "zero fields suppressed: {out}");
    assert!(
        !out.contains("fields zero"),
        "no `(N fields zero)` summary in any form: {out}",
    );
}
#[test]
fn struct_all_zero_emits_empty_inline_form() {
    // A struct whose members are all zero renders as "name{}".
    let zero_field = |name: &str| RenderedMember {
        name: name.into(),
        value: RenderedValue::Uint { bits: 32, value: 0 },
    };
    let v = RenderedValue::Struct {
        type_name: Some("s".into()),
        members: vec![zero_field("a"), zero_field("b")],
    };
    let out = format!("{v}");
    assert_eq!(
        out, "s{}",
        "all-zero struct collapses to empty inline form: {out}",
    );
}
#[test]
fn struct_bpf_printk_format_strings_collapsed() {
    // Members whose names carry the `___fmt` marker (bpf_printk-style
    // format-string temporaries) must be suppressed; real members stay.
    let chars: Vec<RenderedValue> = [b'h', b'i', 0u8]
        .iter()
        .map(|&value| RenderedValue::Char { value })
        .collect();
    let fmt_string_value = RenderedValue::Array {
        len: 3,
        elements: chars,
    };
    let v = RenderedValue::Struct {
        type_name: Some("s".into()),
        members: vec![
            RenderedMember {
                name: "real_field".into(),
                value: RenderedValue::Uint {
                    bits: 32,
                    value: 42,
                },
            },
            RenderedMember {
                name: "ktstr___fmt_blah".into(),
                value: fmt_string_value.clone(),
            },
            RenderedMember {
                name: "____fmt_other".into(),
                value: fmt_string_value,
            },
        ],
    };
    let out = format!("{v}");
    assert!(out.contains("real_field=42"));
    assert!(
        !out.contains("ktstr___fmt_blah"),
        "fmt string suppressed: {out}"
    );
    assert!(
        !out.contains("____fmt_other"),
        "fmt string suppressed: {out}"
    );
}
#[test]
fn array_renders_as_quoted_string_when_printable() {
    // A NUL-terminated printable char array renders as a quoted string.
    let elements: Vec<RenderedValue> = b"hello\0"
        .iter()
        .map(|&value| RenderedValue::Char { value })
        .collect();
    let v = RenderedValue::Array { len: 6, elements };
    assert_eq!(format!("{v}"), "\"hello\"");
}
#[test]
fn array_renders_multiline_string_with_pipe() {
    // Char arrays containing newlines use the pipe-prefixed multi-line
    // form ("|\n...") rather than a single quoted string.
    let elements: Vec<RenderedValue> = b"a\nb\nc\0\0\0"
        .iter()
        .map(|&value| RenderedValue::Char { value })
        .collect();
    let v = RenderedValue::Array { len: 8, elements };
    let out = format!("{v}");
    assert!(
        out.starts_with("|\n"),
        "must start with pipe + newline: {out}"
    );
    assert!(out.contains("a"), "must contain first segment: {out}");
    assert!(out.contains("b"), "must contain second segment: {out}");
}
#[test]
fn write_array_element_uint_wide_renders_hex() {
    // Unsigned array elements print in hex regardless of bit width.
    let v = RenderedValue::Array {
        len: 2,
        elements: vec![
            RenderedValue::Uint {
                bits: 32,
                value: 0xff,
            },
            RenderedValue::Uint {
                bits: 64,
                value: 0xdead_beef,
            },
        ],
    };
    let out = format!("{v}");
    assert!(out.contains("0xff"), "32-bit uint hex: {out}");
    assert!(out.contains("0xdeadbeef"), "64-bit uint hex: {out}");
}
/// Test double for `MemReader` used by the pointer-cycle tests: serves
/// arena reads from a fixed address→bytes table inside the half-open
/// window `[arena_start, arena_end)`. Kernel (`read_kva`) reads always
/// fail (see the impl below).
struct CycleArenaReader {
    // Exact-address lookup table; a read at any other address returns None.
    bytes_by_addr: std::collections::HashMap<u64, Vec<u8>>,
    // Arena window bounds (inclusive start, exclusive end).
    arena_start: u64,
    arena_end: u64,
}
impl MemReader for CycleArenaReader {
    /// Kernel-VA reads are unsupported in this stub.
    fn read_kva(&self, _: u64, _: usize) -> Option<Vec<u8>> {
        None
    }
    /// Half-open window check: [arena_start, arena_end).
    fn is_arena_addr(&self, addr: u64) -> bool {
        (self.arena_start..self.arena_end).contains(&addr)
    }
    /// Serve `len` bytes mapped at exactly `addr`; there are no offset
    /// reads into a mapping, and short mappings yield None.
    fn read_arena(&self, addr: u64, len: usize) -> Option<Vec<u8>> {
        self.bytes_by_addr
            .get(&addr)
            .filter(|bytes| bytes.len() >= len)
            .map(|bytes| bytes[..len].to_vec())
    }
}
#[test]
fn ptr_cycle_self_pointing_surfaces_cycle_reason() {
    // A list_head whose next/prev both point back at its own node must
    // surface a [cycle] marker instead of recursing without bound.
    let Some(btf) = test_btf() else {
        crate::report::test_skip("test_btf returned None");
        return;
    };
    let Ok(ids) = btf.resolve_ids_by_name("list_head") else {
        crate::report::test_skip("BTF missing 'list_head'");
        return;
    };
    let Some(&id) = ids.first() else {
        crate::report::test_skip("BTF resolved 'list_head' to empty id list");
        return;
    };
    let Some(ty) = peel_modifiers(&btf, id) else {
        crate::report::test_skip("could not peel list_head modifiers");
        return;
    };
    let Type::Struct(_) = ty else {
        crate::report::test_skip("BTF 'list_head' is not a Struct");
        return;
    };
    let Some(size) = type_size(&btf, &ty) else {
        crate::report::test_skip("list_head size unresolved");
        return;
    };
    // The fixture writes two 8-byte pointers at offsets 0 and 8; skip
    // (rather than panic on an out-of-bounds slice) if BTF reports an
    // unexpectedly small list_head.
    if size < 16 {
        crate::report::test_skip("list_head smaller than two pointers");
        return;
    }
    const ARENA_START: u64 = 0x10_0000_0000;
    const ARENA_END: u64 = 0x10_0001_0000;
    const NODE_A: u64 = 0x10_0000_1000;
    // Node A's next and prev both point back at NODE_A.
    let mut node_bytes = vec![0u8; size];
    node_bytes[0..8].copy_from_slice(&NODE_A.to_le_bytes());
    node_bytes[8..16].copy_from_slice(&NODE_A.to_le_bytes());
    let mut bytes_by_addr = std::collections::HashMap::new();
    bytes_by_addr.insert(NODE_A, node_bytes);
    let reader = CycleArenaReader {
        bytes_by_addr,
        arena_start: ARENA_START,
        arena_end: ARENA_END,
    };
    // The outer value being rendered also points at NODE_A twice.
    let mut outer = vec![0u8; size];
    outer[0..8].copy_from_slice(&NODE_A.to_le_bytes());
    outer[8..16].copy_from_slice(&NODE_A.to_le_bytes());
    let v = render_value_with_mem(&btf, id, &outer, &reader);
    let out = format!("{v}");
    assert!(
        out.contains("[cycle]"),
        "rendered output must surface cycle marker for a self-pointing list_head: {out}",
    );
    // The node address may legitimately appear a few times, but cycle
    // detection must keep the count bounded.
    let node_hex = format!("0x{NODE_A:x}");
    let occurrences = out.matches(&node_hex).count();
    assert!(
        occurrences < 10,
        "cycle detection must bound recursion; saw {occurrences} \
        occurrences of {node_hex}: {out}",
    );
}
#[test]
fn ptr_cycle_two_node_loop_surfaces_cycle_reason() {
    // Two list_head nodes pointing at each other (A → B → A) must also be
    // caught by cycle detection.
    let Some(btf) = test_btf() else {
        crate::report::test_skip("test_btf returned None");
        return;
    };
    let Ok(ids) = btf.resolve_ids_by_name("list_head") else {
        crate::report::test_skip("BTF missing 'list_head'");
        return;
    };
    let Some(&id) = ids.first() else {
        crate::report::test_skip("BTF resolved 'list_head' to empty id list");
        return;
    };
    let Some(ty) = peel_modifiers(&btf, id) else {
        crate::report::test_skip("could not peel list_head modifiers");
        return;
    };
    let Type::Struct(_) = ty else {
        crate::report::test_skip("BTF 'list_head' is not a Struct");
        return;
    };
    let Some(size) = type_size(&btf, &ty) else {
        crate::report::test_skip("list_head size unresolved");
        return;
    };
    // The fixture writes two 8-byte pointers at offsets 0 and 8; skip
    // (rather than panic on an out-of-bounds slice) if BTF reports an
    // unexpectedly small list_head.
    if size < 16 {
        crate::report::test_skip("list_head smaller than two pointers");
        return;
    }
    const ARENA_START: u64 = 0x10_0000_0000;
    const ARENA_END: u64 = 0x10_0001_0000;
    const NODE_A: u64 = 0x10_0000_1000;
    const NODE_B: u64 = 0x10_0000_2000;
    // A points at B; B points back at A.
    let mut a_bytes = vec![0u8; size];
    a_bytes[0..8].copy_from_slice(&NODE_B.to_le_bytes());
    a_bytes[8..16].copy_from_slice(&NODE_B.to_le_bytes());
    let mut b_bytes = vec![0u8; size];
    b_bytes[0..8].copy_from_slice(&NODE_A.to_le_bytes());
    b_bytes[8..16].copy_from_slice(&NODE_A.to_le_bytes());
    let mut bytes_by_addr = std::collections::HashMap::new();
    bytes_by_addr.insert(NODE_A, a_bytes);
    bytes_by_addr.insert(NODE_B, b_bytes);
    let reader = CycleArenaReader {
        bytes_by_addr,
        arena_start: ARENA_START,
        arena_end: ARENA_END,
    };
    // The outer value enters the loop at B.
    let mut outer = vec![0u8; size];
    outer[0..8].copy_from_slice(&NODE_B.to_le_bytes());
    outer[8..16].copy_from_slice(&NODE_B.to_le_bytes());
    let v = render_value_with_mem(&btf, id, &outer, &reader);
    let out = format!("{v}");
    assert!(
        out.contains("[cycle]"),
        "two-node cycle must surface cycle marker: {out}",
    );
}
#[test]
fn ptr_cycle_visited_set_does_not_leak_across_calls() {
    // Rendering the same self-cycle twice must give identical output:
    // the cycle-detection visited set must be fresh for each render call,
    // not accumulated across calls.
    let Some(btf) = test_btf() else {
        crate::report::test_skip("test_btf returned None");
        return;
    };
    let Ok(ids) = btf.resolve_ids_by_name("list_head") else {
        crate::report::test_skip("BTF missing 'list_head'");
        return;
    };
    let Some(&id) = ids.first() else {
        crate::report::test_skip("BTF resolved 'list_head' to empty id list");
        return;
    };
    let Some(ty) = peel_modifiers(&btf, id) else {
        crate::report::test_skip("could not peel list_head modifiers");
        return;
    };
    let Type::Struct(_) = ty else {
        crate::report::test_skip("BTF 'list_head' is not a Struct");
        return;
    };
    let Some(size) = type_size(&btf, &ty) else {
        crate::report::test_skip("list_head size unresolved");
        return;
    };
    // The fixture writes two 8-byte pointers at offsets 0 and 8; skip
    // (rather than panic on an out-of-bounds slice) if BTF reports an
    // unexpectedly small list_head.
    if size < 16 {
        crate::report::test_skip("list_head smaller than two pointers");
        return;
    }
    const ARENA_START: u64 = 0x10_0000_0000;
    const ARENA_END: u64 = 0x10_0001_0000;
    const NODE_A: u64 = 0x10_0000_1000;
    let mut node_bytes = vec![0u8; size];
    node_bytes[0..8].copy_from_slice(&NODE_A.to_le_bytes());
    node_bytes[8..16].copy_from_slice(&NODE_A.to_le_bytes());
    let mut bytes_by_addr = std::collections::HashMap::new();
    bytes_by_addr.insert(NODE_A, node_bytes);
    let reader = CycleArenaReader {
        bytes_by_addr,
        arena_start: ARENA_START,
        arena_end: ARENA_END,
    };
    let mut outer = vec![0u8; size];
    outer[0..8].copy_from_slice(&NODE_A.to_le_bytes());
    outer[8..16].copy_from_slice(&NODE_A.to_le_bytes());
    let v1 = render_value_with_mem(&btf, id, &outer, &reader);
    let out1 = format!("{v1}");
    assert!(out1.contains("[cycle]"), "call 1 cycle: {out1}");
    let v2 = render_value_with_mem(&btf, id, &outer, &reader);
    let out2 = format!("{v2}");
    assert!(out2.contains("[cycle]"), "call 2 cycle: {out2}");
    assert_eq!(out1, out2, "fresh visited set per call: outputs must match",);
}
#[test]
fn cast_annotation_for_all_four_cells() {
    // Exhaustively pin the (addr_space, sdt_alloc) → annotation table.
    let cases = [
        (
            AddrSpace::Arena,
            false,
            "cast→arena",
            "(Arena, false) annotation drift",
        ),
        (
            AddrSpace::Arena,
            true,
            "cast→arena (sdt_alloc)",
            "(Arena, true) annotation drift",
        ),
        (
            AddrSpace::Kernel,
            false,
            "cast→kernel",
            "(Kernel, false) annotation drift",
        ),
        (
            AddrSpace::Kernel,
            true,
            "cast→kernel (sdt_alloc)",
            "(Kernel, true) annotation drift",
        ),
    ];
    for (space, sdt_alloc, expected, msg) in cases {
        assert_eq!(cast_annotation_for(space, sdt_alloc), expected, "{msg}");
    }
}
// Minimal BTF binary-format constants for hand-building synthetic BTF
// blobs in the cast tests below: magic, version, fixed header length,
// and the BTF_KIND_* codes these tests actually serialize.
const CAST_BTF_MAGIC: u16 = 0xEB9F;
const CAST_BTF_VERSION: u8 = 1;
// 24 bytes: magic(2) + version(1) + flags(1) + hdr_len(4) + type_off(4)
// + type_len(4) + str_off(4) + str_len(4) — see cast_build_btf.
const CAST_BTF_HEADER_LEN: u32 = 24;
const CAST_BTF_KIND_INT: u32 = 1;
const CAST_BTF_KIND_PTR: u32 = 2;
const CAST_BTF_KIND_STRUCT: u32 = 4;
const CAST_BTF_KIND_FWD: u32 = 7;
const CAST_BTF_KIND_TYPEDEF: u32 = 8;
const CAST_BTF_KIND_CONST: u32 = 10;
/// Assemble a raw BTF blob (header + type section + string section)
/// from the given synthetic types and a pre-built string table.
fn cast_build_btf(types: &[CastSynType], strings: &[u8]) -> Vec<u8> {
    // Info word: the kind lives in bits 24..29; vlen / kind_flag are OR'd
    // in by the variants that carry them.
    let kind_info = |kind: u32| (kind << 24) & 0x1f00_0000;
    let mut type_section = Vec::new();
    for ty in types {
        match ty {
            CastSynType::Int {
                name_off,
                size,
                encoding,
                offset,
                bits,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                type_section.extend_from_slice(&kind_info(CAST_BTF_KIND_INT).to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                // INT data word: encoding in the top byte, bit offset in
                // byte 2, bit width in the low byte.
                let int_data = (*encoding << 24) | ((*offset & 0xff) << 16) | (*bits & 0xff);
                type_section.extend_from_slice(&int_data.to_le_bytes());
            }
            CastSynType::Struct {
                name_off,
                size,
                members,
            } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                // vlen (member count) occupies the low 16 bits of info.
                let info = kind_info(CAST_BTF_KIND_STRUCT) | (members.len() as u32 & 0xffff);
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&size.to_le_bytes());
                for m in members {
                    type_section.extend_from_slice(&m.name_off.to_le_bytes());
                    type_section.extend_from_slice(&m.type_id.to_le_bytes());
                    // Member offsets are serialized in bits, not bytes.
                    type_section.extend_from_slice(&(m.byte_offset * 8).to_le_bytes());
                }
            }
            CastSynType::Typedef { name_off, type_id } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                type_section.extend_from_slice(&kind_info(CAST_BTF_KIND_TYPEDEF).to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            CastSynType::Const { type_id } => {
                // Modifier types are anonymous: name offset 0.
                type_section.extend_from_slice(&0u32.to_le_bytes());
                type_section.extend_from_slice(&kind_info(CAST_BTF_KIND_CONST).to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            CastSynType::Ptr { type_id } => {
                // Pointers are anonymous too.
                type_section.extend_from_slice(&0u32.to_le_bytes());
                type_section.extend_from_slice(&kind_info(CAST_BTF_KIND_PTR).to_le_bytes());
                type_section.extend_from_slice(&type_id.to_le_bytes());
            }
            CastSynType::Fwd { name_off, is_union } => {
                type_section.extend_from_slice(&name_off.to_le_bytes());
                // kind_flag (bit 31) distinguishes union fwd from struct fwd.
                let kind_flag = if *is_union { 1u32 << 31 } else { 0 };
                let info = kind_info(CAST_BTF_KIND_FWD) | kind_flag;
                type_section.extend_from_slice(&info.to_le_bytes());
                type_section.extend_from_slice(&0u32.to_le_bytes());
            }
        }
    }
    // Header, then the two sections back-to-back: type_off is 0 and
    // str_off equals type_len, i.e. both offsets are relative to the end
    // of the header.
    let type_len = type_section.len() as u32;
    let str_len = strings.len() as u32;
    let mut blob = Vec::new();
    blob.extend_from_slice(&CAST_BTF_MAGIC.to_le_bytes());
    blob.push(CAST_BTF_VERSION);
    blob.push(0); // flags
    blob.extend_from_slice(&CAST_BTF_HEADER_LEN.to_le_bytes());
    blob.extend_from_slice(&0u32.to_le_bytes()); // type_off
    blob.extend_from_slice(&type_len.to_le_bytes()); // type_len
    blob.extend_from_slice(&type_len.to_le_bytes()); // str_off
    blob.extend_from_slice(&str_len.to_le_bytes()); // str_len
    blob.extend_from_slice(&type_section);
    blob.extend_from_slice(strings);
    blob
}
/// One member of a synthetic BTF struct (see `CastSynType::Struct`).
#[derive(Clone, Copy)]
struct CastSynMember {
    // Offset of the member name in the string table.
    name_off: u32,
    // BTF type id of the member's type (fixtures below use 1-based ids).
    type_id: u32,
    // Byte offset within the struct; converted to a bit offset when
    // serialized by cast_build_btf.
    byte_offset: u32,
}
/// The minimal menu of BTF kinds the cast tests need to synthesize.
/// Serialized to the BTF wire format by `cast_build_btf`.
enum CastSynType {
    // BTF_KIND_INT: named integer with explicit size and bit layout.
    Int {
        name_off: u32,
        size: u32,
        encoding: u32,
        offset: u32,
        bits: u32,
    },
    // BTF_KIND_STRUCT with its member list.
    Struct {
        name_off: u32,
        size: u32,
        members: Vec<CastSynMember>,
    },
    // BTF_KIND_TYPEDEF: named alias of `type_id`.
    Typedef { name_off: u32, type_id: u32 },
    // BTF_KIND_CONST modifier over `type_id` (serialized anonymous).
    Const { type_id: u32 },
    // BTF_KIND_PTR to `type_id` (serialized anonymous).
    Ptr { type_id: u32 },
    // BTF_KIND_FWD; `is_union` sets the kind_flag bit on serialization.
    Fwd { name_off: u32, is_union: bool },
}
/// Build the string table shared by the T/Q synthetic-BTF fixtures.
/// Returns the table plus the offsets of "u64", "T", "Q", "f", and "x".
fn cast_strings_for_t_q() -> (Vec<u8>, u32, u32, u32, u32, u32) {
    // BTF string tables begin with a mandatory NUL at offset 0.
    let mut strings: Vec<u8> = vec![0];
    let mut intern = |name: &str| -> u32 {
        let off = strings.len() as u32;
        strings.extend_from_slice(name.as_bytes());
        strings.push(0);
        off
    };
    let n_int = intern("u64");
    let n_t = intern("T");
    let n_q = intern("Q");
    let n_f = intern("f");
    let n_x = intern("x");
    (strings, n_int, n_t, n_q, n_f, n_x)
}
/// Synthetic BTF containing `struct T { u64 f; }` and `struct Q { u64 x; }`.
/// Returns (blob, T's type id, Q's type id) — ids 2 and 3; id 1 is u64.
fn cast_btf_t_and_q() -> (Vec<u8>, u32, u32) {
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    let u64_ty = CastSynType::Int {
        name_off: n_int,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    };
    let t_ty = CastSynType::Struct {
        name_off: n_t,
        size: 8,
        members: vec![CastSynMember {
            name_off: n_f,
            type_id: 1,
            byte_offset: 0,
        }],
    };
    let q_ty = CastSynType::Struct {
        name_off: n_q,
        size: 8,
        members: vec![CastSynMember {
            name_off: n_x,
            type_id: 1,
            byte_offset: 0,
        }],
    };
    (cast_build_btf(&[u64_ty, t_ty, q_ty], &strings), 2, 3)
}
/// Synthetic BTF containing `struct T { u32 f; }` and `struct Q { u64 x; }`.
/// Returns (blob, T's type id, Q's type id) — ids 3 and 4; ids 1 and 2
/// are u32 and u64 respectively.
fn cast_btf_t_with_u32() -> (Vec<u8>, u32, u32) {
    // Local string table: this fixture needs "u32" in addition to the
    // shared T/Q names.
    let mut strings: Vec<u8> = vec![0];
    let mut intern = |name: &str| -> u32 {
        let off = strings.len() as u32;
        strings.extend_from_slice(name.as_bytes());
        strings.push(0);
        off
    };
    let n_u32 = intern("u32");
    let n_u64 = intern("u64");
    let n_t = intern("T");
    let n_q = intern("Q");
    let n_f = intern("f");
    let n_x = intern("x");
    let types = [
        CastSynType::Int {
            name_off: n_u32,
            size: 4,
            encoding: 0,
            offset: 0,
            bits: 32,
        },
        CastSynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 4,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 2,
                byte_offset: 0,
            }],
        },
    ];
    (cast_build_btf(&types, &strings), 3, 4)
}
/// Configurable `MemReader` test double for the cast-intercept tests.
/// Everything defaults to empty/None, so each test sets only the fields
/// it needs via struct-update syntax.
#[derive(Default)]
struct CastStubReader {
    // Single canned cast hit, returned for EVERY lookup when `cast_map`
    // is None (see cast_lookup in the impl below).
    hit: Option<CastHit>,
    // Full (parent type id, member byte offset) → CastHit table; takes
    // precedence over `hit` when present.
    cast_map: Option<super::super::cast_analysis::CastMap>,
    // Half-open [lo, hi) arena address window; None → nothing is arena.
    arena_window: Option<(u64, u64)>,
    // Exact-address byte tables backing read_arena / read_kva.
    arena_bytes_at: std::collections::HashMap<u64, Vec<u8>>,
    kva_bytes_at: std::collections::HashMap<u64, Vec<u8>>,
    // Canned answers for resolve_arena_type.
    arena_type_at: std::collections::HashMap<u64, ArenaResolveHit>,
    // Cross-BTF fixtures: name → (index into cross_btf_btfs, type id,
    // is_struct flag).
    cross_btf_btfs: Vec<std::sync::Arc<Btf>>,
    cross_btf_index: std::collections::HashMap<String, (usize, u32, bool)>,
    // Addresses (truncated to u32) reported as already rendered.
    rendered_slot_addrs: std::collections::HashSet<u32>,
}
impl MemReader for CastStubReader {
    /// Serve `len` bytes mapped at exactly `kva`; missing or too-short
    /// entries fail the read.
    fn read_kva(&self, kva: u64, len: usize) -> Option<Vec<u8>> {
        self.kva_bytes_at
            .get(&kva)
            .filter(|bytes| bytes.len() >= len)
            .map(|bytes| bytes[..len].to_vec())
    }
    /// Half-open window check against `arena_window`, if configured.
    fn is_arena_addr(&self, addr: u64) -> bool {
        self.arena_window
            .map_or(false, |(lo, hi)| addr >= lo && addr < hi)
    }
    /// Same exact-address lookup as `read_kva`, but for arena memory.
    fn read_arena(&self, addr: u64, len: usize) -> Option<Vec<u8>> {
        self.arena_bytes_at
            .get(&addr)
            .filter(|bytes| bytes.len() >= len)
            .map(|bytes| bytes[..len].to_vec())
    }
    /// Prefer the full cast map when present; otherwise fall back to the
    /// single canned `hit`, which answers ANY lookup.
    fn cast_lookup(&self, parent_type_id: u32, member_byte_offset: u32) -> Option<CastHit> {
        match &self.cast_map {
            Some(map) => map.get(&(parent_type_id, member_byte_offset)).copied(),
            None => self.hit,
        }
    }
    fn resolve_arena_type(&self, addr: u64) -> Option<ArenaResolveHit> {
        self.arena_type_at.get(&addr).copied()
    }
    /// Resolve a forward declaration against the canned index; the hit
    /// must agree with the requested struct-vs-union kind.
    fn cross_btf_resolve_fwd(
        &self,
        name: &str,
        kind: super::FwdKind,
    ) -> Option<super::CrossBtfRef<'_>> {
        let &(idx, type_id, idx_is_struct) = self.cross_btf_index.get(name)?;
        if super::FwdKind::from_is_struct(idx_is_struct) != kind {
            return None;
        }
        self.cross_btf_btfs.get(idx).map(|btf| super::CrossBtfRef {
            btf: btf.as_ref(),
            type_id,
        })
    }
    /// NOTE(review): the address is truncated to u32 before the lookup,
    /// matching how `rendered_slot_addrs` is keyed.
    fn is_already_rendered(&self, addr: u64) -> bool {
        self.rendered_slot_addrs.contains(&(addr as u32))
    }
}
#[test]
fn cast_intercept_u64_renders_as_ptr_with_chase() {
    // Happy-path intercept: a CastMap entry for (T, offset 0) makes the
    // u64 member T.f render as a Ptr, and a successful arena chase
    // attaches the rendered target struct Q as the deref payload.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    // T.f holds an arena address; the arena bytes there hold Q { x: 0x42 }.
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0x42u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    // Outer render: struct T with its single member f.
    let RenderedValue::Struct {
        type_name,
        ref members,
    } = v
    else {
        panic!("expected Struct render, got {v:?}");
    };
    assert_eq!(type_name.as_deref(), Some("T"));
    assert_eq!(members.len(), 1);
    assert_eq!(members[0].name, "f");
    // The intercepted member must be a Ptr, not the Uint it would be
    // without the cast hit.
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "intercept must produce Ptr (not Uint); got {:?}",
            members[0].value
        );
    };
    assert_eq!(
        value, TARGET_ADDR,
        "Ptr value must be the loaded u64 (arena address)"
    );
    assert!(
        deref_skipped_reason.is_none(),
        "successful chase: no skip reason; got {deref_skipped_reason:?}"
    );
    // The chase payload is Q, rendered from the arena bytes.
    let inner = deref
        .as_deref()
        .expect("chase succeeded → deref must be Some");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be the rendered Q Struct, got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("Q"),
        "inner deref Struct must carry the target's name"
    );
    assert_eq!(inner_members.len(), 1);
    assert_eq!(inner_members[0].name, "x");
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!("Q.x must render as Uint, got {:?}", inner_members[0].value);
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x42);
}
#[test]
fn cast_intercept_null_value_no_crash() {
    // A cast hit on a member whose loaded value is 0 must still render a
    // Ptr — but with no deref payload and no skip reason, since no chase
    // is attempted on a null pointer.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    // T.f = 0 (null).
    let outer_bytes = 0u64.to_le_bytes().to_vec();
    let reader = CastStubReader {
        // Blanket hit: every cast_lookup resolves to (Q, Arena).
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "null intercept must still surface as Ptr (matches Type::Ptr arm); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, 0);
    assert!(deref.is_none(), "null Ptr has no deref");
    assert!(
        deref_skipped_reason.is_none(),
        "null Ptr must NOT carry a skip reason: a chase was never attempted"
    );
}
#[test]
fn cast_intercept_non_u64_field_not_intercepted() {
    // Cast interception only applies to 8-byte members: a u32 member
    // keeps rendering as a plain Uint even with a blanket cast hit
    // configured.
    let (blob, t_id, q_id) = cast_btf_t_with_u32();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let outer_bytes = 0xCAFEu32.to_le_bytes().to_vec();
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Uint { bits, value } = members[0].value else {
        panic!(
            "u32 field with size==4 must render as Uint, NOT Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(bits, 32, "u32 surfaces as 32-bit Uint");
    assert_eq!(value, 0xCAFE);
}
#[test]
fn cast_intercept_no_hit_renders_uint() {
    // Baseline: with no cast hit configured at all, a u64 member renders
    // as the plain Uint it is.
    let (blob, t_id, _q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let outer_bytes = 0x12345678u64.to_le_bytes().to_vec();
    // Default reader: no hit, no cast_map → cast_lookup always None.
    let reader = CastStubReader::default();
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Uint { bits, value } = members[0].value else {
        panic!(
            "no cast_lookup hit must yield plain Uint, got {:?}",
            members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x12345678);
}
#[test]
fn cast_chase_cycle_detection() {
let (blob, t_id, _q_id) = cast_btf_t_and_q();
let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
const ARENA_LO: u64 = 0x10_0000_0000;
const ARENA_HI: u64 = 0x10_0001_0000;
const SELF_ADDR: u64 = 0x10_0000_1000;
let outer_bytes = SELF_ADDR.to_le_bytes().to_vec();
let self_bytes = SELF_ADDR.to_le_bytes().to_vec();
let mut arena_bytes = std::collections::HashMap::new();
arena_bytes.insert(SELF_ADDR, self_bytes);
let reader = CastStubReader {
hit: Some(CastHit {
alloc_size: None,
target_type_id: t_id,
addr_space: AddrSpace::Arena,
}),
arena_window: Some((ARENA_LO, ARENA_HI)),
arena_bytes_at: arena_bytes,
..Default::default()
};
let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
let RenderedValue::Struct { ref members, .. } = v else {
panic!("expected outer Struct render, got {v:?}");
};
let RenderedValue::Ptr {
value: outer_value,
deref: ref outer_deref,
deref_skipped_reason: ref outer_reason,
..
} = members[0].value
else {
panic!(
"outer chase must surface as Ptr, got {:?}",
members[0].value
);
};
assert_eq!(outer_value, SELF_ADDR);
assert!(
outer_reason.is_none(),
"outer chase succeeded; no skip reason expected, got {outer_reason:?}"
);
let inner = outer_deref.as_deref().expect("outer chase deref Some");
let RenderedValue::Struct {
members: ref inner_members,
..
} = *inner
else {
panic!("inner deref must be a Struct, got {inner:?}");
};
let RenderedValue::Ptr {
value: inner_value,
deref: ref inner_deref,
deref_skipped_reason: ref inner_reason,
..
} = inner_members[0].value
else {
panic!(
"inner u64 cast intercept must surface as Ptr, got {:?}",
inner_members[0].value
);
};
assert_eq!(inner_value, SELF_ADDR);
assert!(
inner_deref.is_none(),
"cycle detection must NOT recurse into the deref payload"
);
let reason = inner_reason
.as_deref()
.expect("cycle detection must populate deref_skipped_reason");
assert!(
reason.contains("cycle"),
"skip reason must mention cycle, got: {reason}"
);
}
#[test]
fn cast_chase_kernel_plausibility_rejects_freed_slab() {
    // A kernel-space chase reads bytes that should fail the renderer's
    // plausibility check (a stale, freed-slab-like payload). Expected:
    // Ptr with no deref and a skip reason mentioning "plausibility".
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const KVA: u64 = 0xffff_8000_dead_beef;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    // Payload at KVA: a value the plausibility check is expected to
    // reject as not a live Q.
    let stale_bytes: Vec<u8> = 0xff00_0000_0000_0001u64.to_le_bytes().to_vec();
    let mut kva_bytes = std::collections::HashMap::new();
    kva_bytes.insert(KVA, stale_bytes);
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Kernel,
        }),
        kva_bytes_at: kva_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "kernel cast intercept must surface as Ptr, got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, KVA);
    assert!(
        deref.is_none(),
        "plausibility-rejected chase must NOT carry a deref payload"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("plausibility rejection must populate skip reason");
    assert!(
        reason.contains("plausibility"),
        "skip reason must mention plausibility, got: {reason}"
    );
}
#[test]
fn cast_intercept_kernel_hint_arena_value_dispatches_to_arena_reader() {
    // The CastHit hints Kernel, but the loaded value falls inside the
    // arena window. Runtime dispatch must pick the arena reader (the kva
    // table is empty here, so a kernel chase could not have produced the
    // payload) and annotate the result as cast→arena.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    // T.f holds an arena address; the arena bytes there hold Q { x: 0x42 }.
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0x42u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            // Deliberately "wrong" hint: the loaded value is arena.
            addr_space: AddrSpace::Kernel,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected outer Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "Kernel-hint + arena-value cast must surface as Ptr, got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR, "Ptr value is the loaded u64");
    assert!(
        deref_skipped_reason.is_none(),
        "arena dispatch chose the arena reader → no skip reason; got {deref_skipped_reason:?}",
    );
    let inner = deref
        .as_deref()
        .expect("arena reader returned Some bytes → deref payload populated");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!(
            "deref payload must be the rendered Q struct (proves arena chase, \
            not kernel chase, did the read), got {inner:?}",
        );
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("Q"),
        "inner deref carries Q's name → render_value_inner(target_type_id) succeeded",
    );
    assert_eq!(inner_members.len(), 1, "Q has one u64 member");
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!(
            "Q.x must render as Uint (was rendered through arena reader bytes), got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x42, "arena reader returned 0x42 at TARGET_ADDR");
    // The annotation must reflect the runtime dispatch decision, not the
    // static hint.
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→arena"),
        "runtime dispatch chose arena → annotation is cast→arena, NOT cast→kernel; \
        got {cast_annotation:?}",
    );
}
use super::super::cast_analysis::{BpfInsn, CastMap, InitialReg, analyze_casts};
/// Opcode byte for a BPF_LDX | BPF_DW | BPF_MEM instruction
/// (64-bit load from memory).
fn cast_ldx_dw_mem_code() -> u8 {
    use libbpf_rs::libbpf_sys as bs;
    let code = bs::BPF_LDX | bs::BPF_DW | bs::BPF_MEM;
    code as u8
}
/// Opcode byte for a BPF_JMP | BPF_EXIT instruction.
fn cast_exit_code() -> u8 {
    use libbpf_rs::libbpf_sys as bs;
    let code = bs::BPF_JMP | bs::BPF_EXIT;
    code as u8
}
/// Build a 64-bit load: dst = *(u64 *)(src + off).
fn cast_ldx_dw(dst: u8, src: u8, off: i16) -> BpfInsn {
    BpfInsn::new(cast_ldx_dw_mem_code(), dst, src, off, 0)
}
/// Build a BPF exit instruction (all operands zero).
fn cast_exit() -> BpfInsn {
    BpfInsn::new(cast_exit_code(), 0, 0, 0, 0)
}
/// Build an ALU64 MOV with off=1 — the addr_space_cast encoding — with
/// the target address space carried in `imm`.
fn cast_addr_space_cast(dst: u8, src: u8, imm: i32) -> BpfInsn {
    use libbpf_rs::libbpf_sys as bs;
    let code = (bs::BPF_ALU64 | bs::BPF_MOV | bs::BPF_X) as u8;
    // off=1 is what distinguishes addr_space_cast from a plain MOV.
    BpfInsn::new(code, dst, src, 1, imm)
}
/// Synthetic BTF where `struct T` (16 bytes) carries its single u64
/// member `f` at byte offset 8, and `struct Q { u64 x; }` is 8 bytes.
/// Returns (blob, T's type id, Q's type id) — ids 2 and 3; id 1 is u64.
fn cast_btf_t_at_offset_8_q_at_offset_0() -> (Vec<u8>, u32, u32) {
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    let u64_ty = CastSynType::Int {
        name_off: n_int,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    };
    let t_ty = CastSynType::Struct {
        name_off: n_t,
        size: 16,
        members: vec![CastSynMember {
            name_off: n_f,
            type_id: 1,
            byte_offset: 8,
        }],
    };
    let q_ty = CastSynType::Struct {
        name_off: n_q,
        size: 8,
        members: vec![CastSynMember {
            name_off: n_x,
            type_id: 1,
            byte_offset: 0,
        }],
    };
    (cast_build_btf(&[u64_ty, t_ty, q_ty], &strings), 2, 3)
}
/// End-to-end pipeline test: the map produced by `analyze_casts` (not a
/// hand-built one) must drive the renderer's cast intercept. The insn
/// stream loads T.f (offset 8), addr-space-casts it, then dereferences,
/// so the analyzer should emit (T, 8) → (Q, Arena) and the renderer
/// should chase the pointer through the stub arena reader into Q.
#[test]
fn cast_pipeline_analyzer_output_drives_renderer_intercept() {
    let (blob, t_id, q_id) = cast_btf_t_at_offset_8_q_at_offset_0();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    // r2 = *(u64*)(r1+8); r2 = addr_space_cast(r2); r3 = *(u64*)(r2+0); exit
    let insns = vec![
        cast_ldx_dw(2, 1, 8),
        cast_addr_space_cast(2, 2, 1),
        cast_ldx_dw(3, 2, 0),
        cast_exit(),
    ];
    // r1 enters the program typed as struct T.
    let cast_map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: t_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        cast_map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        "analyzer must emit (T, 8) → (Q, Arena); got: {cast_map:?}"
    );
    // Stub arena: TARGET_ADDR lies inside the window and maps to Q's bytes.
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let mut outer_bytes = vec![0u8; 16];
    outer_bytes[8..16].copy_from_slice(&TARGET_ADDR.to_le_bytes());
    let inner_bytes = 0x42u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct {
        type_name,
        ref members,
    } = v
    else {
        panic!("expected outer Struct render, got {v:?}");
    };
    assert_eq!(type_name.as_deref(), Some("T"));
    assert_eq!(members.len(), 1, "T has a single u64 member at offset 8");
    assert_eq!(members[0].name, "f");
    // The (T, 8) cast hit must promote the raw u64 field to a Ptr render.
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "(T, 8) cast hit must produce Ptr (not Uint); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR, "Ptr value must be the loaded u64");
    assert!(
        deref_skipped_reason.is_none(),
        "successful chase must carry no skip reason; got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("chase succeeded → deref must be Some");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be the rendered Q struct, got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("Q"),
        "deref payload must carry Q's name"
    );
    assert_eq!(inner_members.len(), 1);
    assert_eq!(inner_members[0].name, "x");
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!("Q.x must render as Uint, got {:?}", inner_members[0].value);
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x42);
}
/// The analyzer and the renderer must peel modifier chains identically.
/// r1 is declared as `T_alias` (typedef → const → struct T); the analyzer
/// must key its cast hit on the underlying struct T's id, and rendering
/// via the typedef id must still reach the same intercept.
#[test]
fn cast_pipeline_modifier_chain_renderer_peels_to_analyzer_struct_id() {
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    // Append extra names to the shared string table, returning each offset.
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let mut strings = strings;
    let n_typedef = push(&mut strings, "T_alias");
    // ids: 1=int, 2=struct T, 3=struct Q, 4=const(T), 5=typedef T_alias→const(T)
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 16,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 8,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Const { type_id: 2 },
        CastSynType::Typedef {
            name_off: n_typedef,
            type_id: 4,
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    let typedef_id: u32 = 5;
    let insns = vec![
        cast_ldx_dw(2, 1, 8),
        cast_addr_space_cast(2, 2, 1),
        cast_ldx_dw(3, 2, 0),
        cast_exit(),
    ];
    // r1 enters typed as the typedef, not the bare struct.
    let cast_map = analyze_casts(
        &insns,
        &btf,
        &[InitialReg {
            reg: 1,
            struct_type_id: typedef_id,
        }],
        &[],
        &[],
        &[],
    );
    assert_eq!(
        cast_map.get(&(t_id, 8)),
        Some(&CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena
        }),
        "analyzer must peel typedef→const→struct and key on T_id={t_id}; got: {cast_map:?}"
    );
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let mut outer_bytes = vec![0u8; 16];
    outer_bytes[8..16].copy_from_slice(&TARGET_ADDR.to_le_bytes());
    let inner_bytes = 0x99u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    // Render from the typedef id: the renderer must peel to T itself.
    let v = render_value_with_mem(&btf, typedef_id, &outer_bytes, &reader);
    let RenderedValue::Struct {
        type_name,
        ref members,
    } = v
    else {
        panic!("expected Struct render after peel, got {v:?}");
    };
    assert_eq!(
        type_name.as_deref(),
        Some("T"),
        "renderer must collapse typedef/const wrappers to underlying T name"
    );
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "modifier-chain peel must reach the cast intercept; got {:?}. \
             A failure here means the renderer's peel diverges from the \
             analyzer's — the integration is broken.",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(deref_skipped_reason.is_none());
    let inner = deref.as_deref().expect("chase deref Some");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        ..
    } = *inner
    else {
        panic!("deref payload must be Q struct, got {inner:?}");
    };
    assert_eq!(inner_name.as_deref(), Some("Q"));
}
/// Cast hits must be offset-precise within a struct: T has three u64
/// fields; the cast map flags offsets 0 and 8 only. f0/f1 must render as
/// chased Ptrs while f2 (offset 16) must stay a plain Uint — a hit on one
/// offset must not contaminate sibling offsets.
#[test]
fn cast_pipeline_multi_field_only_flagged_offsets_render_as_ptr() {
    let (strings, n_int, n_t, n_q, _n_f, n_x) = cast_strings_for_t_q();
    // Append extra member names to the shared string table.
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let mut strings = strings;
    let n_f0 = push(&mut strings, "f0");
    let n_f1 = push(&mut strings, "f1");
    let n_f2 = push(&mut strings, "f2");
    // ids: 1=int, 2=struct T {f0@0, f1@8, f2@16}, 3=struct Q {x@0}
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 24,
            members: vec![
                CastSynMember {
                    name_off: n_f0,
                    type_id: 1,
                    byte_offset: 0,
                },
                CastSynMember {
                    name_off: n_f1,
                    type_id: 1,
                    byte_offset: 8,
                },
                CastSynMember {
                    name_off: n_f2,
                    type_id: 1,
                    byte_offset: 16,
                },
            ],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    // Hand-built map flagging only (T, 0) and (T, 8) — never (T, 16).
    let mut cast_map: CastMap = std::collections::BTreeMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    cast_map.insert(
        (t_id, 8),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const ADDR_F0: u64 = 0x10_0000_1000;
    const ADDR_F1: u64 = 0x10_0000_2000;
    // f2 holds an arena-looking value but has no cast entry — must stay Uint.
    const COUNTER_F2: u64 = 0x10_0000_3000;
    let mut outer_bytes = vec![0u8; 24];
    outer_bytes[0..8].copy_from_slice(&ADDR_F0.to_le_bytes());
    outer_bytes[8..16].copy_from_slice(&ADDR_F1.to_le_bytes());
    outer_bytes[16..24].copy_from_slice(&COUNTER_F2.to_le_bytes());
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(ADDR_F0, 0xAAu64.to_le_bytes().to_vec());
    arena_bytes.insert(ADDR_F1, 0xBBu64.to_le_bytes().to_vec());
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected outer Struct render, got {v:?}");
    };
    assert_eq!(members.len(), 3, "T has three u64 members");
    assert_eq!(members[0].name, "f0");
    assert_eq!(members[1].name, "f1");
    assert_eq!(members[2].name, "f2");
    let RenderedValue::Ptr {
        value: f0_value,
        ref deref,
        ..
    } = members[0].value
    else {
        panic!(
            "f0 (offset 0) must render as Ptr (cast map hit); got {:?}",
            members[0].value
        );
    };
    assert_eq!(f0_value, ADDR_F0);
    assert!(deref.is_some(), "f0 chase must succeed (deref Some)");
    let RenderedValue::Ptr {
        value: f1_value,
        ref deref,
        ..
    } = members[1].value
    else {
        panic!(
            "f1 (offset 8) must render as Ptr (cast map hit); got {:?}",
            members[1].value
        );
    };
    assert_eq!(f1_value, ADDR_F1);
    assert!(deref.is_some(), "f1 chase must succeed (deref Some)");
    let RenderedValue::Uint {
        bits: f2_bits,
        value: f2_value,
    } = members[2].value
    else {
        panic!(
            "f2 (offset 16) must render as Uint (no cast map entry); \
             got {:?}. A failure here means a hit on one offset is \
             contaminating unrelated offsets in the same struct.",
            members[2].value
        );
    };
    assert_eq!(f2_bits, 64);
    assert_eq!(f2_value, COUNTER_F2);
}
/// An empty cast map must mean "no hits": T's u64 member renders as a
/// plain Uint rather than being promoted to a phantom pointer.
#[test]
fn cast_pipeline_empty_cast_map_renders_uint() {
    let (blob, struct_id, _q) = cast_btf_t_at_offset_8_q_at_offset_0();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    // Present but empty: Some(BTreeMap::new()) must behave as "no hits".
    let empty_map: CastMap = std::collections::BTreeMap::new();
    let reader = CastStubReader {
        cast_map: Some(empty_map),
        ..Default::default()
    };
    // T is 16 bytes; its single u64 member lives at byte offset 8.
    let payload: Vec<u8> = [0u8; 8]
        .into_iter()
        .chain(0xCAFE_F00Du64.to_le_bytes())
        .collect();
    let rendered = render_value_with_mem(&btf, struct_id, &payload, &reader);
    let RenderedValue::Struct { ref members, .. } = rendered else {
        panic!("expected Struct render, got {rendered:?}");
    };
    let RenderedValue::Uint { bits, value } = members[0].value else {
        panic!(
            "empty cast map must leave u64 as Uint; got {:?}. A \
             failure here means an empty BTreeMap is being treated \
             as 'wildcard hit' instead of 'no hits' — a regression \
             that would promote every u64 to a phantom pointer.",
            members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0xCAFE_F00D);
}
/// Cast-map lookups must be keyed on the parent struct's type id, not
/// just the field offset: an entry for (U, 0) must not intercept a
/// render of struct T at offset 0, even though the offsets match and
/// T's value lies inside the arena window.
#[test]
fn cast_pipeline_wrong_struct_id_does_not_intercept() {
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    // Append extra names (struct U and its member g) to the string table.
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let mut strings = strings;
    let n_u = push(&mut strings, "U");
    let n_g = push(&mut strings, "g");
    // ids: 1=int, 2=struct T {f@0}, 3=struct Q {x@0}, 4=struct U {g@0}
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_u,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_g,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    let u_id: u32 = 4;
    // The only cast entry is keyed on U — T must miss it.
    let mut cast_map: CastMap = std::collections::BTreeMap::new();
    cast_map.insert(
        (u_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    // Deliberately inside the arena window so only the key mismatch saves us.
    const VAL: u64 = 0x10_0000_1000;
    let outer_bytes = VAL.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(VAL, 0x77u64.to_le_bytes().to_vec());
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Uint { bits, value } = members[0].value else {
        panic!(
            "(T, 0) must miss the (U, 0) cast entry → render as \
             Uint; got {:?}. A failure here means cast_lookup is \
             ignoring parent_type_id, which would promote every \
             u64 at the entry's offset across every struct in the \
             scheduler.",
            members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(value, VAL);
}
/// Runtime dispatch: the cast hit says Arena, but the loaded value lies
/// outside the arena window, so the chase must fall through to the
/// kernel arm. The stub has no KVA bytes, so the kernel read fails —
/// the Ptr must carry the failure reason plus a cast→kernel annotation.
#[test]
fn cast_chase_arena_hint_with_non_arena_value_falls_through_to_kernel_arm() {
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    // One below ARENA_LO — just outside the window.
    const OUT_OF_WINDOW: u64 = 0x0F_FFFF_FFFF;
    let outer_bytes = OUT_OF_WINDOW.to_le_bytes().to_vec();
    let mut cast_map: super::super::cast_analysis::CastMap = std::collections::BTreeMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!("must surface as Ptr; got {:?}", members[0].value);
    };
    assert_eq!(value, OUT_OF_WINDOW);
    assert!(deref.is_none());
    let reason = deref_skipped_reason.as_deref().expect("skip reason");
    assert!(
        reason.contains("read_kva failed"),
        "skip reason must mention 'read_kva failed' (kernel arm); got: {reason}"
    );
    assert!(
        reason.contains("cast analysis may have flagged"),
        "Arena→kernel runtime dispatch must annotate suffix; got: {reason}"
    );
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→kernel"),
        "runtime kernel dispatch must produce cast→kernel annotation"
    );
}
/// A cast hit whose target type id does not exist in the BTF must still
/// surface the field as a Ptr, skip the deref, and report a skip reason
/// that names the unresolvable id.
#[test]
fn cast_chase_kernel_target_type_id_unresolvable() {
    let (blob, t_id, _q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    // No type with this id exists in the synthetic BTF.
    const UNRESOLVABLE: u32 = 9999;
    const KVA: u64 = 0xffff_8000_0000_1000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    // `hit` is the stub's unconditional cast hit (no map lookup involved).
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: UNRESOLVABLE,
            addr_space: AddrSpace::Kernel,
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "unresolvable target must still surface as Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, KVA);
    assert!(
        deref.is_none(),
        "unresolvable target must not produce a deref payload"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("peel_modifiers failure must populate skip reason");
    assert!(
        reason.contains("unresolvable"),
        "skip reason must mention 'unresolvable'; got: {reason}"
    );
    assert!(
        reason.contains(&UNRESOLVABLE.to_string()),
        "skip reason must include the offending type id; got: {reason}"
    );
}
/// A cast hit whose target struct has BTF size 0 (an incomplete type)
/// must surface as a Ptr with no deref and a skip reason naming both
/// the zero size and the incompleteness.
#[test]
fn cast_chase_kernel_target_btf_size_zero() {
    let (strings, n_int, n_t, n_q, n_f, _n_x) = cast_strings_for_t_q();
    // ids: 1=int, 2=struct T {f@0}, 3=struct Q with size 0 and no members
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 0,
            members: vec![],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    const KVA: u64 = 0xffff_8000_0000_1000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Kernel,
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "zero-sized target must still surface as Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, KVA);
    assert!(deref.is_none());
    let reason = deref_skipped_reason
        .as_deref()
        .expect("zero-sized target must populate skip reason");
    assert!(
        reason.contains("BTF size is 0"),
        "skip reason must say 'BTF size is 0'; got: {reason}"
    );
    assert!(
        reason.contains("incomplete type"),
        "skip reason must mention 'incomplete type'; got: {reason}"
    );
}
/// A cast hit targeting a BTF forward declaration (struct flavor, with no
/// complete sibling definition in this BTF) must surface as a Ptr with no
/// deref and a descriptive skip reason: 'forward declaration', the type's
/// name and id, 'struct' (not 'union'), and NOT the legacy generic
/// 'has unresolvable size' message.
#[test]
fn cast_chase_kernel_target_fwd_struct() {
    // Build the string table from scratch (offset 0 is the empty name).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "T");
    let n_fwd = push(&mut strings, "sdt_data");
    let n_f = push(&mut strings, "f");
    // ids: 1=int, 2=struct T {f@0}, 3=fwd struct sdt_data
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: false,
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let fwd_id: u32 = 3;
    const KVA: u64 = 0xffff_8000_0000_3000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: fwd_id,
            addr_space: AddrSpace::Kernel,
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "Fwd target must still surface as Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, KVA);
    assert!(
        deref.is_none(),
        "Fwd target must not produce a deref payload"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("Fwd target must populate skip reason");
    assert!(
        reason.contains("forward declaration"),
        "skip reason must mention 'forward declaration'; got: {reason}"
    );
    assert!(
        reason.contains("body not in this BTF"),
        "skip reason must mention body absence; got: {reason}"
    );
    assert!(
        reason.contains("sdt_data"),
        "skip reason must include the Fwd type's name; got: {reason}"
    );
    assert!(
        reason.contains("struct"),
        "skip reason must say 'struct' (not 'union') for is_struct() Fwd; got: {reason}"
    );
    assert!(
        reason.contains(&fwd_id.to_string()),
        "skip reason must include the type id; got: {reason}"
    );
    assert!(
        !reason.contains("has unresolvable size"),
        "Fwd targets must not surface the generic fall-through; got: {reason}"
    );
}
/// Same as the struct-Fwd case but for the union flavor: the skip reason
/// must label the forward declaration 'union my_union', never
/// 'struct my_union'.
#[test]
fn cast_chase_kernel_target_fwd_union() {
    // Build the string table from scratch (offset 0 is the empty name).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "T");
    let n_fwd = push(&mut strings, "my_union");
    let n_f = push(&mut strings, "f");
    // ids: 1=int, 2=struct T {f@0}, 3=fwd union my_union
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: true,
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let fwd_id: u32 = 3;
    const KVA: u64 = 0xffff_8000_0000_4000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: fwd_id,
            addr_space: AddrSpace::Kernel,
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "Fwd union target must surface as Ptr; got {:?}",
            members[0].value
        );
    };
    let reason = deref_skipped_reason
        .as_deref()
        .expect("Fwd union target must populate skip reason");
    assert!(
        reason.contains("union my_union"),
        "skip reason must surface 'union my_union'; got: {reason}"
    );
    assert!(
        !reason.contains("struct my_union"),
        "Fwd union must not be labelled 'struct'; got: {reason}"
    );
}
/// The BTF-typed pointer arm (Type::Ptr, no cast map involved): a member
/// declared `struct sdt_data *data` whose pointee is only a forward
/// declaration must render as a Ptr with no cast annotation, no deref,
/// and an 'arena chase …' skip reason describing the Fwd (name, id,
/// 'struct', body absence) — not the legacy generic message.
#[test]
fn arena_chase_pointee_fwd_surfaces_descriptive_reason() {
    // Build the string table from scratch (offset 0 is the empty name).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "sdt_chunk");
    let n_fwd = push(&mut strings, "sdt_data");
    let n_data = push(&mut strings, "data");
    // ids: 1=int, 2=fwd struct sdt_data, 3=ptr→fwd, 4=struct sdt_chunk {data@0: ptr}
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: false,
        },
        CastSynType::Ptr { type_id: 2 },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_data,
                type_id: 3,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let chunk_id: u32 = 4;
    let fwd_id: u32 = 2;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    // No cast map: this exercises the plain BTF pointer path.
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, chunk_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    assert_eq!(members.len(), 1);
    assert_eq!(members[0].name, "data");
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "data field must render as Ptr (BTF Type::Ptr arm); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        cast_annotation.is_none(),
        "BTF-typed pointers must leave cast_annotation None; got {cast_annotation:?}"
    );
    assert!(
        deref.is_none(),
        "Fwd pointee chase must not produce a deref payload"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("Fwd pointee must populate skip reason");
    assert!(
        reason.starts_with("arena chase"),
        "BTF Ptr arm must use 'arena chase' label; got: {reason}"
    );
    assert!(
        reason.contains("forward declaration"),
        "skip reason must mention 'forward declaration'; got: {reason}"
    );
    assert!(
        reason.contains("body not in this BTF"),
        "skip reason must mention body absence; got: {reason}"
    );
    assert!(
        reason.contains("sdt_data"),
        "skip reason must include the Fwd type's name; got: {reason}"
    );
    assert!(
        reason.contains("struct"),
        "skip reason must say 'struct' (kind_flag=0); got: {reason}"
    );
    assert!(
        reason.contains(&fwd_id.to_string()),
        "skip reason must include the Fwd type id; got: {reason}"
    );
    assert!(
        !reason.contains("has unresolvable size"),
        "Fwd targets must not surface the legacy generic message; got: {reason}"
    );
}
/// An anonymous forward declaration (name_off = 0) as the pointee must
/// still produce a skip reason, and that reason must say 'anonymous'
/// plus the aggregate kind ('struct forward declaration').
#[test]
fn arena_chase_pointee_fwd_anonymous() {
    // Build the string table from scratch (offset 0 is the empty name).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "wrap");
    let n_data = push(&mut strings, "data");
    // ids: 1=int, 2=anonymous fwd struct, 3=ptr→fwd, 4=struct wrap {data@0: ptr}
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Fwd {
            name_off: 0,
            is_union: false,
        },
        CastSynType::Ptr { type_id: 2 },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_data,
                type_id: 3,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let chunk_id: u32 = 4;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, chunk_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!("data field must render as Ptr; got {:?}", members[0].value);
    };
    let reason = deref_skipped_reason
        .as_deref()
        .expect("anonymous Fwd must populate skip reason");
    assert!(
        reason.contains("anonymous"),
        "anonymous Fwd reason must say 'anonymous'; got: {reason}"
    );
    assert!(
        reason.contains("struct forward declaration"),
        "anonymous Fwd reason must mention the aggregate kind; got: {reason}"
    );
}
/// Fwd-resolution shortcut on the BTF pointer arm: the pointee is a
/// forward-declared `task_ctx`, but a complete struct with the same name
/// exists elsewhere in the BTF. The chase must resolve the Fwd to that
/// sibling and render the full struct instead of skipping.
#[test]
fn arena_chase_pointee_fwd_resolves_to_complete_struct_sibling() {
    // Build the string table from scratch (offset 0 is the empty name).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "scx_task_map_val");
    let n_data = push(&mut strings, "data");
    let n_task_ctx = push(&mut strings, "task_ctx");
    let n_field = push(&mut strings, "field");
    // ids: 1=int, 2=fwd task_ctx, 3=ptr→fwd, 4=outer struct,
    //      5=complete struct task_ctx (same name as the Fwd at id 2)
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Fwd {
            name_off: n_task_ctx,
            is_union: false,
        },
        CastSynType::Ptr { type_id: 2 },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_data,
                type_id: 3,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_task_ctx,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_field,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let outer_id: u32 = 4;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0x77u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, outer_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "data field must render as Ptr (BTF Type::Ptr arm); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        deref_skipped_reason.is_none(),
        "Fwd-resolved chase must succeed; got skip reason: {deref_skipped_reason:?}"
    );
    let payload = deref
        .as_deref()
        .expect("Fwd-resolved chase must produce a deref payload");
    let RenderedValue::Struct {
        ref type_name,
        members: ref inner_members,
    } = *payload
    else {
        panic!("deref must be Struct render; got {payload:?}");
    };
    assert_eq!(
        type_name.as_deref(),
        Some("task_ctx"),
        "deref must carry the resolved Struct name"
    );
    assert_eq!(inner_members.len(), 1);
    assert_eq!(inner_members[0].name, "field");
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!(
            "inner field must decode as Uint; got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x77);
}
/// Fwd-resolution shortcut on the CAST arm (arena flavor): the cast map
/// targets a forward-declared `task_ctx`, but a complete struct with the
/// same name exists in the BTF. The arena chase must resolve the Fwd,
/// annotate cast→arena, and render the full struct.
#[test]
fn cast_chase_arena_target_fwd_resolves_to_complete_struct_sibling() {
    // Build the string table from scratch (offset 0 is the empty name).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_u64 = push(&mut strings, "u64");
    let n_t = push(&mut strings, "scx_task_map_val");
    let n_data = push(&mut strings, "data");
    let n_target = push(&mut strings, "task_ctx");
    let n_field = push(&mut strings, "field");
    // ids: 1=int, 2=parent struct {data@0: u64}, 3=fwd task_ctx,
    //      4=complete struct task_ctx (same name as the Fwd at id 3)
    let types = vec![
        CastSynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_data,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_target,
            is_union: false,
        },
        CastSynType::Struct {
            name_off: n_target,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_field,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let parent_id: u32 = 2;
    let fwd_target_id: u32 = 3;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0xABCDu64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    // The cast hit points at the Fwd id, not the complete sibling.
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (parent_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: fwd_target_id,
            addr_space: AddrSpace::Arena,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, parent_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "data field must render as cast-recovered Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→arena"),
        "cast intercept must annotate the arena chase"
    );
    assert!(
        deref_skipped_reason.is_none(),
        "Fwd-resolved cast chase must not skip; got: {deref_skipped_reason:?}"
    );
    let payload = deref
        .as_deref()
        .expect("Fwd-resolved cast chase must produce deref payload");
    let RenderedValue::Struct {
        ref type_name,
        members: ref inner_members,
    } = *payload
    else {
        panic!("deref must be Struct render; got {payload:?}");
    };
    assert_eq!(
        type_name.as_deref(),
        Some("task_ctx"),
        "deref must carry the resolved Struct name"
    );
    assert_eq!(inner_members[0].name, "field");
    let RenderedValue::Uint { value, .. } = inner_members[0].value else {
        panic!(
            "inner field must decode as Uint; got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(value, 0xABCD);
}
/// Fwd-resolution shortcut on the CAST arm (kernel flavor): same setup as
/// the arena variant but the cast hit says Kernel and the pointee bytes
/// are served by the stub's KVA map. The chase must resolve the Fwd to
/// the complete sibling, annotate cast→kernel, and render the struct.
#[test]
fn cast_chase_kernel_target_fwd_resolves_to_complete_struct_sibling() {
    // Build the string table from scratch (offset 0 is the empty name).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_u64 = push(&mut strings, "u64");
    let n_t = push(&mut strings, "parent");
    let n_data = push(&mut strings, "data");
    let n_target = push(&mut strings, "kernel_target");
    let n_field = push(&mut strings, "field");
    // ids: 1=int, 2=parent struct {data@0: u64}, 3=fwd kernel_target,
    //      4=complete struct kernel_target (same name as the Fwd at id 3)
    let types = vec![
        CastSynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_data,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_target,
            is_union: false,
        },
        CastSynType::Struct {
            name_off: n_target,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_field,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let parent_id: u32 = 2;
    let fwd_target_id: u32 = 3;
    const KVA: u64 = 0xffff_8000_0000_3000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let inner_bytes = 0xDEADBEEFu64.to_le_bytes().to_vec();
    let mut kva_bytes = std::collections::HashMap::new();
    kva_bytes.insert(KVA, inner_bytes);
    // The cast hit points at the Fwd id with Kernel address space.
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (parent_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: fwd_target_id,
            addr_space: AddrSpace::Kernel,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        kva_bytes_at: kva_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, parent_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "data field must render as cast-recovered Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, KVA);
    assert_eq!(cast_annotation.as_deref(), Some("cast→kernel"));
    assert!(
        deref_skipped_reason.is_none(),
        "Fwd-resolved kernel cast chase must not skip; got: {deref_skipped_reason:?}"
    );
    let payload = deref
        .as_deref()
        .expect("Fwd-resolved kernel cast chase must produce deref payload");
    let RenderedValue::Struct {
        ref type_name,
        members: ref inner_members,
    } = *payload
    else {
        panic!("deref must be Struct render; got {payload:?}");
    };
    assert_eq!(
        type_name.as_deref(),
        Some("kernel_target"),
        "deref must carry the resolved Struct name"
    );
    let RenderedValue::Uint { value, .. } = inner_members[0].value else {
        panic!(
            "inner field must decode as Uint; got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(value, 0xDEADBEEF);
}
#[test]
fn fwd_shortcut_rejects_aggregate_kind_mismatch() {
    // Scenario: `wrap.data` is a pointer to Fwd("foo") declared as a struct,
    // while the only complete sibling named "foo" is patched below into a
    // *union*. The Fwd-resolution shortcut must reject the kind-mismatched
    // sibling and leave the Fwd unresolved (deref skipped).
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_u64 = push(&mut strings, "u64");
    let n_wrap = push(&mut strings, "wrap");
    let n_data = push(&mut strings, "data");
    let n_foo = push(&mut strings, "foo");
    let n_x = push(&mut strings, "x");
    // ids: 1 = u64, 2 = Fwd("foo"), 3 = Ptr->Fwd, 4 = struct wrap, 5 = "foo"
    // (struct here; rewritten to union by the blob patch below).
    let types = vec![
        CastSynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Fwd {
            name_off: n_foo,
            is_union: false,
        },
        CastSynType::Ptr { type_id: 2 },
        CastSynType::Struct {
            name_off: n_wrap,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_data,
                type_id: 3,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_foo,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let mut blob = cast_build_btf(&types, &strings);
    // Byte offset of type id 5's info word: 24-byte header, Int entry
    // (12 + 4 = 16), Fwd (12), Ptr (12), Struct with one member (12 + 12 =
    // 24), then +4 to skip the entry's own name_off field.
    // NOTE(review): assumes cast_build_btf emits exactly this layout — confirm.
    let id5_info_off: usize = 24 + 16 + 12 + 12 + 24 + 4;
    let info = u32::from_le_bytes(blob[id5_info_off..id5_info_off + 4].try_into().unwrap());
    // Rewrite the kind bits (mask 0x1f << 24) to 5, i.e. BTF union kind.
    let new_info = (info & !(0x1f << 24)) | (5u32 << 24);
    blob[id5_info_off..id5_info_off + 4].copy_from_slice(&new_info.to_le_bytes());
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF with union parses");
    let wrap_id: u32 = 4;
    let fwd_id: u32 = 2;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, wrap_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!("data field must render as Ptr; got {:?}", members[0].value);
    };
    // Kind mismatch (struct Fwd vs union sibling): no deref, and the skip
    // reason must identify the unresolved Fwd by description, name, and id.
    assert!(
        deref.is_none(),
        "aggregate-kind mismatch must NOT resolve the Fwd; chase must skip"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("aggregate-kind mismatch must populate skip reason (Fwd unresolved)");
    assert!(
        reason.contains("forward declaration"),
        "skip reason must report the Fwd; got: {reason}"
    );
    assert!(
        reason.contains("foo"),
        "skip reason must include the Fwd's name; got: {reason}"
    );
    assert!(
        reason.contains(&fwd_id.to_string()),
        "skip reason must include the Fwd's id; got: {reason}"
    );
}
#[test]
fn peel_modifiers_resolving_fwd_no_sibling_returns_fwd() {
    // String table starts with the mandatory empty string at offset 0.
    let mut strings: Vec<u8> = vec![0];
    let mut intern = |name: &str| -> u32 {
        let off = strings.len() as u32;
        strings.extend(name.bytes());
        strings.push(0);
        off
    };
    let n_int = intern("u64");
    let n_fwd = intern("lonely_fwd");
    // id 1: u64; id 2: a Fwd with no complete sibling of the same name.
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: false,
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let (peeled, peeled_id) =
        peel_modifiers_resolving_fwd(&btf, 2).expect("Fwd resolves through helper");
    // With nothing to resolve against, the helper must hand back the Fwd itself.
    assert!(
        matches!(peeled, Type::Fwd(_)),
        "no-sibling lookup must return the original Fwd; got {peeled:?}"
    );
    assert_eq!(peeled_id, 2);
}
#[test]
fn peel_modifiers_resolving_fwd_anonymous_fwd_returns_fwd() {
    // Only the mandatory empty string; both entries are anonymous (name_off 0).
    let strings: Vec<u8> = vec![0];
    let mut types = Vec::with_capacity(2);
    types.push(CastSynType::Int {
        name_off: 0,
        size: 8,
        encoding: 0,
        offset: 0,
        bits: 64,
    });
    types.push(CastSynType::Fwd {
        name_off: 0,
        is_union: false,
    });
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let (peeled, peeled_id) =
        peel_modifiers_resolving_fwd(&btf, 2).expect("anonymous Fwd resolves through helper");
    // An unnamed Fwd cannot match any sibling, so it must come back unchanged.
    match peeled {
        Type::Fwd(_) => {}
        _ => panic!("anonymous Fwd must remain Fwd; got {peeled:?}"),
    }
    assert_eq!(peeled_id, 2);
}
#[test]
fn peel_modifiers_resolving_fwd_through_typedef_chain() {
    // Layout: id 1 = u64, id 2 = Typedef("alias") -> id 3, id 3 = Fwd("target"),
    // id 4 = complete struct "target". Peeling id 2 must land on id 4.
    let mut strings: Vec<u8> = vec![0];
    let mut intern = |name: &str| -> u32 {
        let off = strings.len() as u32;
        strings.extend(name.bytes());
        strings.push(0);
        off
    };
    let n_u64 = intern("u64");
    let n_alias = intern("alias");
    let n_target = intern("target");
    let n_field = intern("field");
    let types = vec![
        CastSynType::Int {
            name_off: n_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Typedef {
            name_off: n_alias,
            type_id: 3,
        },
        CastSynType::Fwd {
            name_off: n_target,
            is_union: false,
        },
        CastSynType::Struct {
            name_off: n_target,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_field,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let (peeled, peeled_id) =
        peel_modifiers_resolving_fwd(&btf, 2).expect("Typedef→Fwd→Struct chain resolves");
    match peeled {
        Type::Struct(_) => {}
        _ => panic!("Typedef→Fwd chain must land on the complete Struct; got {peeled:?}"),
    }
    assert_eq!(
        peeled_id, 4,
        "resolved id must be the complete Struct's id, not the Typedef or Fwd id"
    );
}
#[test]
fn cast_chase_kernel_read_kva_failure() {
    // The stub reader supplies a cast hit but no kernel memory, so the
    // chase's read_kva call fails and the renderer must record a skip reason.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const KVA: u64 = 0xffff_8000_0000_2000;
    let outer_bytes = Vec::from(KVA.to_le_bytes());
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Kernel,
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let members = match &v {
        RenderedValue::Struct { members, .. } => members,
        _ => panic!("expected Struct render, got {v:?}"),
    };
    let (value, deref, deref_skipped_reason) = match &members[0].value {
        RenderedValue::Ptr {
            value,
            deref,
            deref_skipped_reason,
            ..
        } => (*value, deref, deref_skipped_reason),
        _ => panic!(
            "read_kva failure must still surface as Ptr; got {:?}",
            members[0].value
        ),
    };
    assert_eq!(value, KVA);
    assert!(
        deref.is_none(),
        "read_kva failure must not produce a deref payload"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("read_kva failure must populate skip reason");
    assert!(
        reason.contains("read_kva failed"),
        "skip reason must say 'read_kva failed'; got: {reason}"
    );
    assert!(
        reason.contains(&format!("0x{KVA:x}")),
        "skip reason must include the failing address in hex; got: {reason}"
    );
    assert!(
        reason.contains("needed"),
        "skip reason must include the requested byte count; got: {reason}"
    );
}
#[test]
fn cast_chase_kernel_page_edge_truncation() {
    // Kernel cast chase where the pointee (Q, 100 bytes) starts 16 bytes
    // before a page boundary: the stub returns only 16 bytes, which must
    // surface as a *successful* deref wrapped in Truncated{needed, had}.
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // Q is deliberately larger (100 bytes) than the 16 bytes the stub
        // will supply at the target address.
        CastSynType::Struct {
            name_off: n_q,
            size: 100,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    // 0x...0ff0 is 16 bytes shy of the next 4 KiB boundary.
    const KVA: u64 = 0xffff_8000_0000_0ff0;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let mut target_bytes = vec![0u8; 16];
    target_bytes[0..8].copy_from_slice(&0xCAFEu64.to_le_bytes());
    let mut kva_bytes = std::collections::HashMap::new();
    kva_bytes.insert(KVA, target_bytes);
    // Map (T, offset 0) -> kernel pointer to Q so the u64 field is chased.
    let mut cast_map: super::super::cast_analysis::CastMap = std::collections::BTreeMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Kernel,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        kva_bytes_at: kva_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "page-edge clipped chase must still surface as Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, KVA);
    // A clipped read is still a success: no skip reason, deref present.
    assert!(
        deref_skipped_reason.is_none(),
        "successful (clipped) read must carry no skip reason; got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("read succeeded → deref must be Some");
    let RenderedValue::Truncated {
        needed,
        had,
        ref partial,
    } = *inner
    else {
        panic!("btf_size > read_size must wrap deref payload in Truncated; got {inner:?}");
    };
    assert_eq!(needed, 100, "Truncated.needed must be the BTF size");
    assert_eq!(
        had, 16,
        "Truncated.had must be the page-edge-clipped read size"
    );
    // The partial render may itself nest one more Truncated before the struct.
    let inner_struct = match &**partial {
        RenderedValue::Struct { .. } => partial.as_ref(),
        RenderedValue::Truncated {
            partial: deeper, ..
        } => deeper.as_ref(),
        other => panic!(
            "partial render must reach a Q struct (possibly via inner Truncated); got {other:?}"
        ),
    };
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner_struct
    else {
        panic!("expected inner Struct render, got {inner_struct:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("Q"),
        "inner struct must carry Q's name"
    );
    assert_eq!(inner_members.len(), 1);
    assert_eq!(inner_members[0].name, "x");
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!("Q.x must render as Uint, got {:?}", inner_members[0].value);
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0xCAFE, "first 8 bytes of clipped read must decode");
}
#[test]
fn cast_chase_kernel_successful_chase_top_byte_non_ff() {
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    // Kernel address whose middle bytes are not 0xff — the plausibility
    // check must still permit the chase and the deref must succeed.
    const KVA: u64 = 0xffff_8000_dead_b000;
    let outer_bytes = Vec::from(KVA.to_le_bytes());
    let mut kva_bytes = std::collections::HashMap::new();
    kva_bytes.insert(KVA, Vec::from(0x42u64.to_le_bytes()));
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Kernel,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        kva_bytes_at: kva_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let members = match &v {
        RenderedValue::Struct { members, .. } => members,
        _ => panic!("expected Struct render, got {v:?}"),
    };
    let (value, deref, deref_skipped_reason) = match &members[0].value {
        RenderedValue::Ptr {
            value,
            deref,
            deref_skipped_reason,
            ..
        } => (*value, deref, deref_skipped_reason),
        _ => panic!(
            "kernel chase must surface as Ptr; got {:?}",
            members[0].value
        ),
    };
    assert_eq!(value, KVA);
    assert!(
        deref_skipped_reason.is_none(),
        "successful chase carries no skip reason; got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("plausibility-allowed chase → deref must be Some");
    let (inner_name, inner_members) = match inner {
        RenderedValue::Struct { type_name, members } => (type_name, members),
        _ => panic!("deref payload must be the rendered Q struct; got {inner:?}"),
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("Q"),
        "deref payload must carry Q's name"
    );
    assert_eq!(inner_members.len(), 1);
    assert_eq!(inner_members[0].name, "x");
    let (bits, value) = match inner_members[0].value {
        RenderedValue::Uint { bits, value } => (bits, value),
        _ => panic!("Q.x must render as Uint, got {:?}", inner_members[0].value),
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x42, "Q.x must reflect the bytes read_kva returned");
}
#[test]
fn cast_chase_arena_pointee_exceeds_cap_wraps_in_truncated() {
    // Arena cast chase where Q's BTF size (5000) exceeds the chase cap: the
    // stub supplies 4096 bytes, and the deref must succeed but be wrapped in
    // Truncated{needed: 5000, had: 4096}.
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        // Q is deliberately larger than the 4096-byte pointer-chase cap.
        CastSynType::Struct {
            name_off: n_q,
            size: 5000,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    // Exactly cap-many bytes available at the target; first 8 decode as 0x77.
    let mut target_bytes = vec![0u8; 4096];
    target_bytes[0..8].copy_from_slice(&0x77u64.to_le_bytes());
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, target_bytes);
    // Map (T, offset 0) -> arena pointer to Q so the u64 field is chased.
    let mut cast_map: super::super::cast_analysis::CastMap = std::collections::BTreeMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "cap-clamped chase must still surface as Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        deref_skipped_reason.is_none(),
        "cap-clamped read is a SUCCESS; no skip reason expected; got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("read succeeded → deref must be Some");
    let RenderedValue::Truncated {
        needed,
        had,
        ref partial,
    } = *inner
    else {
        panic!("btf_size > POINTER_CHASE_CAP must wrap deref in Truncated; got {inner:?}");
    };
    assert_eq!(needed, 5000, "Truncated.needed must be Q's BTF size");
    assert_eq!(
        had, 4096,
        "Truncated.had must equal POINTER_CHASE_CAP (4096)"
    );
    // The partial render may itself nest one more Truncated before the struct.
    let inner_struct = match &**partial {
        RenderedValue::Struct { .. } => partial.as_ref(),
        RenderedValue::Truncated {
            partial: deeper, ..
        } => deeper.as_ref(),
        other => panic!(
            "partial render must reach a Q struct (possibly via inner Truncated); got {other:?}"
        ),
    };
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner_struct
    else {
        panic!("expected inner Struct render, got {inner_struct:?}");
    };
    assert_eq!(inner_name.as_deref(), Some("Q"));
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!("Q.x must render as Uint, got {:?}", inner_members[0].value);
    };
    assert_eq!(bits, 64);
    assert_eq!(
        value, 0x77,
        "first 8 bytes of cap-clamped read must decode correctly"
    );
}
#[test]
fn cast_intercept_u64_at_parent_bytes_boundary_falls_through() {
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    // Only 4 parent bytes: the 8-byte member straddles the buffer end, so
    // the cast intercept's boundary guard must bail before chasing.
    let outer_bytes = vec![0xCA, 0xFE, 0xBA, 0xBE];
    let arena_bytes = std::collections::HashMap::from([(0xBEBA_FECAu64, vec![0u8; 8])]);
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        arena_window: Some((0, u64::MAX)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let rendered = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let outer_struct = match &rendered {
        RenderedValue::Struct { .. } => &rendered,
        RenderedValue::Truncated { partial, .. } => partial.as_ref(),
        other => panic!("expected Struct or Truncated{{Struct}}; got {other:?}"),
    };
    let RenderedValue::Struct { ref members, .. } = *outer_struct else {
        panic!("expected Struct under outer Truncated; got {outer_struct:?}");
    };
    match &members[0].value {
        RenderedValue::Truncated { needed, had, .. } => {
            assert_eq!(*needed, 8, "u64 needs 8 bytes");
            assert_eq!(*had, 4, "supplied bytes for member is 4");
        }
        RenderedValue::Ptr { .. } => panic!(
            "boundary fall-through must NOT produce Ptr — intercept's \
             boundary guard (`field_bytes.get(..8)?` in \
             try_cast_intercept) short-circuits; got {:?}",
            members[0].value
        ),
        other => panic!("boundary fall-through must produce Truncated; got {other:?}"),
    }
}
#[test]
fn cast_intercept_bool_field_not_intercepted() {
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    // encoding 4 marks the 8-byte int as _Bool, which the intercept rejects.
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 4,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let (t_id, q_id): (u32, u32) = (2, 3);
    let outer_bytes = Vec::from(1u64.to_le_bytes());
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        arena_window: Some((0, u64::MAX)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let members = match &v {
        RenderedValue::Struct { members, .. } => members,
        _ => panic!("expected Struct render, got {v:?}"),
    };
    match &members[0].value {
        RenderedValue::Bool { value } => {
            assert!(*value, "bool value 0x01 must render as true");
        }
        RenderedValue::Ptr { .. } => panic!(
            "_Bool field must NOT be intercepted (int.is_bool() gate at \
             encoding gate in try_cast_intercept rejects); got {:?}",
            members[0].value
        ),
        other => panic!("_Bool field must render as Bool; got {other:?}"),
    }
}
#[test]
fn cast_intercept_signed_8byte_int_not_intercepted() {
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    // encoding 1 marks the 8-byte int as signed, which the intercept rejects.
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 1,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let (t_id, q_id): (u32, u32) = (2, 3);
    let outer_bytes = Vec::from((-1i64).to_le_bytes());
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        arena_window: Some((0, u64::MAX)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let members = match &v {
        RenderedValue::Struct { members, .. } => members,
        _ => panic!("expected Struct render, got {v:?}"),
    };
    match &members[0].value {
        RenderedValue::Int { bits, value } => {
            assert_eq!(
                *bits, 64,
                "signed int must render at its declared 64-bit width"
            );
            assert_eq!(*value, -1, "signed -1 must round-trip as Int{{value: -1}}");
        }
        RenderedValue::Ptr { .. } => panic!(
            "signed 8-byte int must NOT be intercepted (int.is_signed() \
             encoding gate in try_cast_intercept rejects); got {:?}",
            members[0].value
        ),
        other => panic!("signed 8-byte int must render as Int; got {other:?}"),
    }
}
#[test]
fn cast_intercept_parent_type_id_none_does_not_crash() {
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let Type::Struct(t_struct) = btf.resolve_type_by_id(t_id).expect("T resolves") else {
        panic!("T_id resolves to non-Struct type");
    };
    let m = t_struct.members.first().expect("T has one member");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = Vec::from(TARGET_ADDR.to_le_bytes());
    let arena_bytes =
        std::collections::HashMap::from([(TARGET_ADDR, Vec::from(0xAAu64.to_le_bytes()))]);
    let reader = CastStubReader {
        hit: Some(CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        }),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    // Drive render_member directly with parent_type_id = None: the intercept
    // has no parent to look up, so the field must decode as a plain Uint.
    let mut visited = std::collections::HashSet::<u64>::new();
    let v = render_member(
        &btf,
        m,
        None,
        &outer_bytes,
        0,
        Some(&reader as &dyn MemReader),
        &mut visited,
    );
    let (bits, value) = match v {
        RenderedValue::Uint { bits, value } => (bits, value),
        _ => panic!(
            "parent_type_id=None must short-circuit the intercept and \
             render as Uint; got {v:?}. A failure here means \
             render_member's `let parent = parent_type_id?` guard at \
             `parent_type_id.and_then(...)` guard in render_member \
             was bypassed."
        ),
    };
    assert_eq!(bits, 64);
    assert_eq!(value, TARGET_ADDR);
}
#[test]
fn cast_chase_recursive_target_with_inner_cast_field() {
    // Two-level chase: T.f -> (cast) Q, then Q.x -> (cast) R. Verifies the
    // renderer forwards the *deref payload's* type id (Q) as parent_type_id
    // when recursing, so the inner (Q, 0) cast entry is honored too.
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let mut strings = strings;
    let n_r = push(&mut strings, "R");
    let n_y = push(&mut strings, "y");
    // ids: 1 = u64, 2 = T, 3 = Q, 4 = R (each with one u64 member).
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_r,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_y,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    let r_id: u32 = 4;
    // Cast entries at both levels; note there is deliberately none at (R, 0),
    // so the recursion terminates with a plain Uint.
    let mut cast_map: super::super::cast_analysis::CastMap = std::collections::BTreeMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    cast_map.insert(
        (q_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: r_id,
            addr_space: AddrSpace::Arena,
        },
    );
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    const TARGET_ADDR_2: u64 = 0x10_0000_2000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    // Q's bytes hold the second pointer; R's bytes hold the terminal value.
    let q_bytes: Vec<u8> = TARGET_ADDR_2.to_le_bytes().to_vec();
    let r_bytes: Vec<u8> = 0xBBu64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, q_bytes);
    arena_bytes.insert(TARGET_ADDR_2, r_bytes);
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected outer Struct render, got {v:?}");
    };
    // Level 1: T.f intercepted to a Ptr whose deref is Q.
    let RenderedValue::Ptr {
        value: outer_value,
        deref: ref outer_deref,
        deref_skipped_reason: ref outer_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "outer cast intercept must surface as Ptr; got {:?}",
            members[0].value
        );
    };
    assert_eq!(outer_value, TARGET_ADDR);
    assert!(
        outer_reason.is_none(),
        "outer chase must succeed; got {outer_reason:?}"
    );
    let q_inner = outer_deref
        .as_deref()
        .expect("outer deref Some (chase succeeded)");
    let RenderedValue::Struct {
        type_name: ref q_name,
        members: ref q_members,
    } = *q_inner
    else {
        panic!("outer deref must be Q struct; got {q_inner:?}");
    };
    assert_eq!(q_name.as_deref(), Some("Q"));
    assert_eq!(q_members.len(), 1);
    assert_eq!(q_members[0].name, "x");
    // Level 2: Q.x intercepted again via the (Q, 0) cast entry.
    let RenderedValue::Ptr {
        value: inner_value,
        deref: ref inner_deref,
        deref_skipped_reason: ref inner_reason,
        ..
    } = q_members[0].value
    else {
        panic!(
            "inner Q.x must surface as Ptr (recursive cast hit); got {:?}. \
             A failure here means the renderer didn't pass Q_id as \
             parent_type_id when recursing through the deref payload.",
            q_members[0].value
        );
    };
    assert_eq!(inner_value, TARGET_ADDR_2);
    assert!(
        inner_reason.is_none(),
        "inner chase must succeed; got {inner_reason:?}"
    );
    let r_inner = inner_deref
        .as_deref()
        .expect("inner deref Some (chase succeeded)");
    let RenderedValue::Struct {
        type_name: ref r_name,
        members: ref r_members,
    } = *r_inner
    else {
        panic!("inner deref must be R struct; got {r_inner:?}");
    };
    assert_eq!(r_name.as_deref(), Some("R"));
    assert_eq!(r_members.len(), 1);
    assert_eq!(r_members[0].name, "y");
    let RenderedValue::Uint { bits, value } = r_members[0].value else {
        panic!(
            "R.y must terminate as Uint (no recursive cast entry at (R,0)); \
             got {:?}",
            r_members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0xBB);
}
#[test]
fn cast_intercept_modifier_chain_parent_uses_post_peel_id() {
    // Render from a Typedef -> Const -> T chain. The cast map is keyed on
    // T's id, so the intercept only fires if the renderer uses the
    // *post-peel* struct id (T) as parent_type_id, not the wrapper ids.
    let (strings, n_int, n_t, n_q, n_f, n_x) = cast_strings_for_t_q();
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let mut strings = strings;
    let n_typedef = push(&mut strings, "T_alias");
    // ids: 1 = u64, 2 = T, 3 = Q, 4 = Const -> T, 5 = Typedef("T_alias") -> Const.
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_q,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Const { type_id: 2 },
        CastSynType::Typedef {
            name_off: n_typedef,
            type_id: 4,
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let q_id: u32 = 3;
    let typedef_id: u32 = 5;
    // Cast entry keyed on the underlying struct T, not the typedef wrapper.
    let mut cast_map: super::super::cast_analysis::CastMap = std::collections::BTreeMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: q_id,
            addr_space: AddrSpace::Arena,
        },
    );
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, 0x55u64.to_le_bytes().to_vec());
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        ..Default::default()
    };
    // Render starting at the typedef wrapper, not at T directly.
    let v = render_value_with_mem(&btf, typedef_id, &outer_bytes, &reader);
    let RenderedValue::Struct {
        type_name,
        ref members,
    } = v
    else {
        panic!("expected Struct render, got {v:?}");
    };
    assert_eq!(
        type_name.as_deref(),
        Some("T"),
        "renderer must collapse modifier wrappers to underlying T name"
    );
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "modifier-chain rendering must reach the cast intercept (peel \
             must produce T_id as parent_type_id); got {:?}. A failure here \
             means peel_modifiers_with_id forwarded the typedef wrapper id \
             instead of the post-peel struct id when calling \
             render_struct.",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(deref_skipped_reason.is_none());
    let inner = deref.as_deref().expect("chase deref Some");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be Q struct, got {inner:?}");
    };
    assert_eq!(inner_name.as_deref(), Some("Q"));
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!("Q.x must render as Uint, got {:?}", inner_members[0].value);
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x55);
}
#[test]
fn mem_reader_default_resolve_arena_type_is_none() {
    // A reader that implements only the required read_kva inherits the
    // trait's default resolve_arena_type, which must always answer None.
    struct DefaultReader;
    impl MemReader for DefaultReader {
        fn read_kva(&self, _addr: u64, _len: usize) -> Option<Vec<u8>> {
            None
        }
    }
    let reader = DefaultReader;
    assert!(
        reader.resolve_arena_type(0x10_0000_1000).is_none(),
        "default resolve_arena_type must return None for any address",
    );
    assert!(
        reader.resolve_arena_type(0).is_none(),
        "default resolve_arena_type must return None for null too",
    );
    assert!(
        reader.resolve_arena_type(u64::MAX).is_none(),
        "default resolve_arena_type must return None for u64::MAX too",
    );
}
#[test]
fn mem_reader_resolve_arena_type_override_returns_configured_hit() {
    // Two configured addresses with distinct ids / header skips; every other
    // address (including null) must miss.
    let arena_types = std::collections::HashMap::from([
        (
            0x10_0000_1008u64,
            ArenaResolveHit {
                target_type_id: 7,
                header_skip: 0,
            },
        ),
        (
            0x10_0000_2000u64,
            ArenaResolveHit {
                target_type_id: 11,
                header_skip: 8,
            },
        ),
    ]);
    let reader = CastStubReader {
        arena_type_at: arena_types,
        ..Default::default()
    };
    assert_eq!(
        reader.resolve_arena_type(0x10_0000_1008),
        Some(ArenaResolveHit {
            target_type_id: 7,
            header_skip: 0,
        }),
    );
    assert_eq!(
        reader.resolve_arena_type(0x10_0000_2000),
        Some(ArenaResolveHit {
            target_type_id: 11,
            header_skip: 8,
        }),
    );
    assert!(
        reader.resolve_arena_type(0x10_0000_3000).is_none(),
        "address not in index must return None",
    );
    assert!(
        reader.resolve_arena_type(0).is_none(),
        "null address must return None",
    );
}
/// Builds a synthetic BTF for the arena "bridge" tests: id 1 = u64,
/// id 2 = Fwd("sdt_data"), id 3 = Ptr -> Fwd, id 4 = struct outer
/// { sdt_data *data; }, id 5 = struct task_ctx { u64 weight; }.
///
/// Note "task_ctx" is NOT a name-sibling of the Fwd ("sdt_data"), so the
/// Fwd can only be resolved through the arena type bridge, never by name.
/// Returns (blob, outer_id = 4, fwd_id = 2, task_ctx_id = 5).
fn bridge_btf_outer_fwd_taskctx() -> (Vec<u8>, u32, u32, u32) {
    let mut strings: Vec<u8> = vec![0];
    // Append a NUL-terminated name to the string table, returning its offset.
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_outer = push(&mut strings, "outer");
    let n_fwd = push(&mut strings, "sdt_data");
    let n_data = push(&mut strings, "data");
    let n_task = push(&mut strings, "task_ctx");
    let n_weight = push(&mut strings, "weight");
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: false,
        },
        CastSynType::Ptr { type_id: 2 },
        CastSynType::Struct {
            name_off: n_outer,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_data,
                type_id: 3,
                byte_offset: 0,
            }],
        },
        CastSynType::Struct {
            name_off: n_task,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_weight,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    (cast_build_btf(&types, &strings), 4, 2, 5)
}
#[test]
fn arena_chase_fwd_target_resolved_via_bridge() {
    // `outer.data` is a `sdt_data *` whose pointee type is only a Fwd in
    // BTF. The stub's arena_type_at index maps the target address to
    // task_ctx, so the chase must resolve the Fwd via that bridge and
    // annotate the pointer with "sdt_alloc".
    let (blob, outer_id, _fwd_id, task_ctx_id) = bridge_btf_outer_fwd_taskctx();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0x42u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    // Bridge index: pointee address -> task_ctx, payload starts at offset 0.
    let mut arena_types = std::collections::HashMap::new();
    arena_types.insert(
        TARGET_ADDR,
        ArenaResolveHit {
            target_type_id: task_ctx_id,
            header_skip: 0,
        },
    );
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        arena_type_at: arena_types,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, outer_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected outer Struct render, got {v:?}");
    };
    assert_eq!(members.len(), 1);
    assert_eq!(members[0].name, "data");
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "data field must render as Ptr (BTF Type::Ptr arm); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        deref_skipped_reason.is_none(),
        "bridge resolve must not surface a skip reason; got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("bridge resolve must produce a deref");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be the resolved task_ctx struct, got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("task_ctx"),
        "bridge must land on the resolved struct's name, not the Fwd's name"
    );
    assert_eq!(inner_members.len(), 1);
    assert_eq!(inner_members[0].name, "weight");
    let RenderedValue::Uint { bits, value } = inner_members[0].value else {
        panic!(
            "task_ctx.weight must render as Uint, got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(value, 0x42);
    assert_eq!(
        cast_annotation.as_deref(),
        Some("sdt_alloc"),
        "Type::Ptr arm bridge resolve must surface 'sdt_alloc' annotation",
    );
}
#[test]
fn arena_chase_fwd_target_resolved_via_bridge_slot_start_skips_header() {
    // The arena slot carries an 8-byte header sentinel followed by the real
    // payload. The bridge entry advertises header_skip = 8, so a correct
    // chase decodes the payload (0x42) and never the sentinel.
    let (blob, outer_id, _fwd_id, task_ctx_id) = bridge_btf_outer_fwd_taskctx();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const SLOT_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = SLOT_ADDR.to_le_bytes().to_vec();
    // 16-byte slot: sentinel header first, payload second.
    let mut slot_bytes = 0xDEAD_BEEF_CAFE_BABEu64.to_le_bytes().to_vec();
    slot_bytes.extend_from_slice(&0x42u64.to_le_bytes());
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: std::collections::HashMap::from([(SLOT_ADDR, slot_bytes)]),
        arena_type_at: std::collections::HashMap::from([(
            SLOT_ADDR,
            ArenaResolveHit {
                target_type_id: task_ctx_id,
                header_skip: 8,
            },
        )]),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, outer_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected outer Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!("data field must render as Ptr; got {:?}", members[0].value);
    };
    assert_eq!(value, SLOT_ADDR);
    assert!(
        deref_skipped_reason.is_none(),
        "slot-start bridge resolve must not surface a skip reason; got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("slot-start bridge resolve must produce a deref");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be the resolved task_ctx struct, got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("task_ctx"),
        "bridge must land on the resolved struct's name even with slot-start skip",
    );
    let RenderedValue::Uint {
        bits,
        value: weight,
    } = inner_members[0].value
    else {
        panic!(
            "task_ctx.weight must render as Uint, got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(bits, 64);
    assert_eq!(
        weight, 0x42,
        "slot-start chase must skip header — weight \
         must be payload value 0x42, not header sentinel \
         0xDEADBEEFCAFEBABE",
    );
    assert_eq!(
        cast_annotation.as_deref(),
        Some("sdt_alloc"),
        "Type::Ptr arm slot-start bridge resolve must surface 'sdt_alloc' annotation",
    );
}
#[test]
fn arena_chase_fwd_target_no_bridge_entry_skips() {
    // No ArenaResolveHit exists for the target address, so the Fwd-typed
    // pointer must not chase: deref stays None, the cast annotation stays
    // None, and the skip reason names both the cause and the Fwd type.
    let (blob, outer_id, _fwd_id, _task_ctx_id) = bridge_btf_outer_fwd_taskctx();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, outer_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected outer Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
        ..
    } = members[0].value
    else {
        panic!("data field must render as Ptr; got {:?}", members[0].value);
    };
    assert!(
        cast_annotation.is_none(),
        "no-bridge resolve must leave cast_annotation None on the Type::Ptr arm; got {cast_annotation:?}",
    );
    assert!(
        deref.is_none(),
        "no-bridge Fwd target must not produce a deref"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("Fwd-no-bridge must populate skip reason");
    for (needle, why) in [
        (
            "forward declaration",
            "skip reason must surface the forward-declaration cause",
        ),
        ("sdt_data", "skip reason must include the Fwd type name"),
    ] {
        assert!(reason.contains(needle), "{why}; got: {reason}");
    }
}
#[test]
fn cast_chase_arena_fwd_target_resolved_via_bridge() {
    // Scenario: struct T's field `f` is cast-tracked (CastHit) to a Fwd
    // ("sdt_data") in the Arena address space, and the bridge (arena_type_at)
    // resolves the pointed-to slot to the concrete `task_ctx` struct. The
    // chase must therefore deref into task_ctx and the cast annotation must
    // carry the "(sdt_alloc)" suffix.
    //
    // Minimal BTF string table; offset 0 is the mandatory empty name.
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "T");
    let n_fwd = push(&mut strings, "sdt_data");
    let n_task = push(&mut strings, "task_ctx");
    let n_f = push(&mut strings, "f");
    let n_weight = push(&mut strings, "weight");
    // Positional type ids (1-based): 1 = u64, 2 = struct T, 3 = Fwd sdt_data,
    // 4 = struct task_ctx. The id bindings below depend on this ordering.
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: false,
        },
        CastSynType::Struct {
            name_off: n_task,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_weight,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let local_fwd_id: u32 = 3;
    let local_task_ctx_id: u32 = 4;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0x55u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    // Bridge entry: the slot at TARGET_ADDR holds a task_ctx, no header skip.
    let mut arena_types = std::collections::HashMap::new();
    arena_types.insert(
        TARGET_ADDR,
        ArenaResolveHit {
            target_type_id: local_task_ctx_id,
            header_skip: 0,
        },
    );
    // Analyzer hit: the value at byte offset 0 of T is a cast to the Fwd,
    // hinted to live in the Arena address space.
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: local_fwd_id,
            addr_space: AddrSpace::Arena,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        arena_type_at: arena_types,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "intercept must produce Ptr (not Uint); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        deref_skipped_reason.is_none(),
        "successful chase: no skip reason; got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("bridge-resolved cast must produce a deref");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be the resolved task_ctx Struct, got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("task_ctx"),
        "bridge must land on the resolved struct, not the Fwd"
    );
    assert_eq!(inner_members.len(), 1);
    let RenderedValue::Uint { value, .. } = inner_members[0].value else {
        panic!(
            "task_ctx.weight must render as Uint, got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(value, 0x55);
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→arena (sdt_alloc)"),
        "cast intercept arena bridge must extend annotation with '(sdt_alloc)'",
    );
}
#[test]
fn cast_chase_arena_fwd_target_no_bridge_keeps_plain_annotation() {
    // The cast targets a Fwd ("sdt_data") in the Arena address space, but no
    // bridge entry (arena_type_at) exists for the target address: the deref
    // must be suppressed with a forward-declaration skip reason, and the
    // annotation must stay plain "cast→arena" (no "(sdt_alloc)" suffix).
    //
    // Minimal BTF string table; offset 0 is the mandatory empty name.
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "T");
    let n_fwd = push(&mut strings, "sdt_data");
    let n_f = push(&mut strings, "f");
    // Positional type ids (1-based): 1 = u64, 2 = struct T, 3 = Fwd sdt_data.
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: false,
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let fwd_id: u32 = 3;
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    // Analyzer hit: cast to the Fwd at byte offset 0 of T, hinted Arena.
    // Note: the reader below deliberately has no arena_type_at entry.
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: fwd_id,
            addr_space: AddrSpace::Arena,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
        ..
    } = members[0].value
    else {
        panic!(
            "intercept must produce Ptr (not Uint); got {:?}",
            members[0].value
        );
    };
    assert!(
        deref.is_none(),
        "no-bridge Fwd cast must not produce a deref"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("Fwd-no-bridge must populate skip reason");
    assert!(
        reason.contains("forward declaration"),
        "skip reason must surface forward-declaration cause; got: {reason}",
    );
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→arena"),
        "no-bridge cast annotation must NOT include '(sdt_alloc)'; got {cast_annotation:?}",
    );
}
#[test]
fn cast_chase_kernel_fwd_target_resolved_via_bridge() {
    // Kernel-address-space cast: T's field casts to Fwd `kern_fwd`; the
    // pointee bytes live at a kernel VA (kva_bytes_at) and the bridge
    // resolves the Fwd to the concrete `kern_real` struct. The chase must
    // deref into kern_real and annotate "cast→kernel (sdt_alloc)".
    //
    // Minimal BTF string table; offset 0 is the mandatory empty name.
    let mut strings: Vec<u8> = vec![0];
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    let n_int = push(&mut strings, "u64");
    let n_t = push(&mut strings, "T");
    let n_fwd = push(&mut strings, "kern_fwd");
    let n_real = push(&mut strings, "kern_real");
    let n_f = push(&mut strings, "f");
    let n_x = push(&mut strings, "x");
    // Positional type ids (1-based): 1 = u64, 2 = struct T, 3 = Fwd kern_fwd,
    // 4 = struct kern_real.
    let types = vec![
        CastSynType::Int {
            name_off: n_int,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_fwd,
            is_union: false,
        },
        CastSynType::Struct {
            name_off: n_real,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_x,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob = cast_build_btf(&types, &strings);
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    let t_id: u32 = 2;
    let fwd_id: u32 = 3;
    let real_id: u32 = 4;
    const KVA: u64 = 0xffff_8000_0000_4000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let inner_bytes = 0x77u64.to_le_bytes().to_vec();
    let mut kva_bytes = std::collections::HashMap::new();
    kva_bytes.insert(KVA, inner_bytes);
    // NOTE: the stub's Fwd-resolve bridge is indexed through `arena_type_at`
    // even for kernel addresses — keyed here by the KVA, not an arena slot.
    let mut arena_types = std::collections::HashMap::new();
    arena_types.insert(
        KVA,
        ArenaResolveHit {
            target_type_id: real_id,
            header_skip: 0,
        },
    );
    // Analyzer hit: byte offset 0 of T casts to the Fwd, in Kernel space.
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: fwd_id,
            addr_space: AddrSpace::Kernel,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        kva_bytes_at: kva_bytes,
        arena_type_at: arena_types,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
        ..
    } = members[0].value
    else {
        panic!("intercept must produce Ptr; got {:?}", members[0].value);
    };
    assert!(
        deref_skipped_reason.is_none(),
        "kernel arm bridge resolve must succeed; got skip reason {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("kernel arm bridge resolve must produce a deref");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be kern_real Struct, got {inner:?}");
    };
    assert_eq!(inner_name.as_deref(), Some("kern_real"));
    let RenderedValue::Uint { value, .. } = inner_members[0].value else {
        panic!(
            "kern_real.x must render as Uint, got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(value, 0x77);
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→kernel (sdt_alloc)"),
        "kernel arm bridge must extend annotation with '(sdt_alloc)'",
    );
}
#[test]
fn cast_chase_arena_target_type_id_zero_resolves_via_resolve_arena_type() {
    // A CastHit with target_type_id == 0 is the analyzer's deferred-resolve
    // sentinel: the concrete type comes from the bridge (arena_type_at) at
    // chase time. Here the bridge supplies Q, so the deref must land on Q.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: std::collections::HashMap::from([(
            TARGET_ADDR,
            0x42u64.to_le_bytes().to_vec(),
        )]),
        arena_type_at: std::collections::HashMap::from([(
            TARGET_ADDR,
            ArenaResolveHit {
                target_type_id: q_id,
                header_skip: 0,
            },
        )]),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "intercept must produce Ptr (not Uint); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        deref_skipped_reason.is_none(),
        "deferred-resolve bridge fire must not surface a skip reason; \
         got {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_deref()
        .expect("deferred-resolve bridge must produce a deref");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = *inner
    else {
        panic!("deref payload must be the resolved Q Struct, got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("Q"),
        "bridge must land on the resolved struct's name (Q), \
         not the analyzer's deferred sentinel",
    );
    let RenderedValue::Uint { value, .. } = inner_members[0].value else {
        panic!("Q.x must render as Uint, got {:?}", inner_members[0].value);
    };
    assert_eq!(value, 0x42);
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→arena (sdt_alloc)"),
        "deferred-resolve bridge fire must extend annotation with \
         '(sdt_alloc)' since `outcome.sdt_alloc_resolved` is set; \
         got {cast_annotation:?}",
    );
}
#[test]
fn cast_chase_arena_target_type_id_zero_no_bridge_entry_skips() {
    // Deferred-resolve cast (target_type_id == 0) with no bridge entry at the
    // target address: the chase must be skipped, the analyzer's STX-flow tag
    // surfaced in the reason, and the annotation kept as plain "cast→arena".
    let (blob, t_id, _q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let reader = CastStubReader {
        cast_map: Some({
            let mut m = super::super::cast_analysis::CastMap::new();
            m.insert(
                (t_id, 0),
                CastHit {
                    alloc_size: None,
                    target_type_id: 0,
                    addr_space: AddrSpace::Arena,
                },
            );
            m
        }),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "intercept must produce Ptr (not Uint); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        deref.is_none(),
        "no-bridge deferred-resolve must not produce a deref"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("no-bridge deferred-resolve must populate skip reason");
    assert!(
        reason.contains("STX-flow path tagged slot as Arena"),
        "skip reason must surface the analyzer's STX-flow tag cause; \
         got: {reason}",
    );
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→arena"),
        "no-bridge deferred-resolve must NOT include '(sdt_alloc)' suffix; \
         got {cast_annotation:?}",
    );
}
#[test]
fn cast_chase_already_rendered_short_circuits() {
    // Dedup: when the chase target's slot address is already present in
    // rendered_slot_addrs, the Ptr still renders (address preserved) but the
    // deref is suppressed and the wire-stable reason string is surfaced.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0x42u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    // Bridge entry is present and would resolve to Q — the dedup must win
    // over it and suppress the chase anyway.
    let mut arena_types = std::collections::HashMap::new();
    arena_types.insert(
        TARGET_ADDR,
        ArenaResolveHit {
            target_type_id: q_id,
            header_skip: 0,
        },
    );
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        },
    );
    // NOTE(review): `as u32` truncates the 64-bit arena address
    // (0x10_0000_1000) to its low 32 bits (0x1000). The dedup below still
    // fires, so `rendered_slot_addrs` appears to key on truncated 32-bit
    // slot addresses — confirm the truncation is intentional in the stub
    // and the production reader alike.
    let mut rendered = std::collections::HashSet::new();
    rendered.insert(TARGET_ADDR as u32);
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        arena_type_at: arena_types,
        rendered_slot_addrs: rendered,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "dedup must still produce Ptr (only the deref is suppressed); \
             got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, TARGET_ADDR);
    assert!(
        deref.is_none(),
        "dedup short-circuit must suppress the deref"
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("dedup must populate the skip reason");
    assert_eq!(
        reason, "already rendered in sdt_allocations",
        "dedup skip reason is wire-stable (operator reads it from \
         RenderedValue::Ptr::deref_skipped_reason); the exact format \
         is part of the dump's machine-checkable contract: got '{reason}'"
    );
}
#[test]
fn cast_chase_already_rendered_miss_proceeds_with_normal_chase() {
    // Dedup miss: a *different* slot address is marked as rendered, so the
    // dedup must not fire and the normal bridge chase must still land on Q.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    const RENDERED_OTHER_ADDR: u64 = 0x10_0000_2000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let inner_bytes = 0x42u64.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, inner_bytes);
    let mut arena_types = std::collections::HashMap::new();
    arena_types.insert(
        TARGET_ADDR,
        ArenaResolveHit {
            target_type_id: q_id,
            header_skip: 0,
        },
    );
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: 0,
            addr_space: AddrSpace::Arena,
        },
    );
    // NOTE(review): `as u32` truncates to the low 32 bits (0x2000 here,
    // vs 0x1000 for TARGET_ADDR) — the miss relies on the truncated values
    // differing; confirm the 32-bit dedup key is intentional.
    let mut rendered = std::collections::HashSet::new();
    rendered.insert(RENDERED_OTHER_ADDR as u32);
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        arena_type_at: arena_types,
        rendered_slot_addrs: rendered,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr { ref deref, .. } = members[0].value else {
        panic!("expected Ptr, got {:?}", members[0].value);
    };
    let inner = deref
        .as_deref()
        .expect("dedup-miss path must still produce a deref via the bridge");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        ..
    } = *inner
    else {
        panic!("deref payload must be the resolved Q Struct, got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("Q"),
        "dedup-miss path must land on Q via the normal chase pipeline",
    );
}
#[test]
fn cast_chase_default_is_already_rendered_returns_false() {
    // With rendered_slot_addrs left at its default (empty), the dedup check
    // must not short-circuit: the chase proceeds and produces a deref.
    let (blob, t_id, q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_1000;
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let reader = CastStubReader {
        cast_map: Some({
            let mut m = super::super::cast_analysis::CastMap::new();
            m.insert(
                (t_id, 0),
                CastHit {
                    alloc_size: None,
                    target_type_id: 0,
                    addr_space: AddrSpace::Arena,
                },
            );
            m
        }),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: std::collections::HashMap::from([(
            TARGET_ADDR,
            0x42u64.to_le_bytes().to_vec(),
        )]),
        arena_type_at: std::collections::HashMap::from([(
            TARGET_ADDR,
            ArenaResolveHit {
                target_type_id: q_id,
                header_skip: 0,
            },
        )]),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr { ref deref, .. } = members[0].value else {
        panic!("expected Ptr, got {:?}", members[0].value);
    };
    assert!(
        deref.is_some(),
        "empty rendered_slot_addrs must NOT short-circuit the chase",
    );
}
#[test]
fn cast_chase_kernel_target_type_id_zero_falls_through_with_mismatch_reason() {
    // The analyzer hinted Arena with a deferred resolve (target_type_id == 0),
    // but the reader has no arena window and the pointer is a kernel VA. The
    // kernel arm must skip the chase, describe the analyzer-hint/runtime
    // mismatch in the reason, and annotate with "cast→kernel".
    let (blob, t_id, _q_id) = cast_btf_t_and_q();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const KVA: u64 = 0xffff_8000_0000_4000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let reader = CastStubReader {
        cast_map: Some({
            let mut m = super::super::cast_analysis::CastMap::new();
            m.insert(
                (t_id, 0),
                CastHit {
                    alloc_size: None,
                    target_type_id: 0,
                    addr_space: AddrSpace::Arena,
                },
            );
            m
        }),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        value,
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
    } = members[0].value
    else {
        panic!(
            "intercept must produce Ptr (not Uint); got {:?}",
            members[0].value
        );
    };
    assert_eq!(value, KVA);
    assert!(
        deref.is_none(),
        "kernel-arm `target_type_id == 0` special case must skip the chase",
    );
    let reason = deref_skipped_reason
        .as_deref()
        .expect("kernel-arm `target_type_id == 0` must populate skip reason");
    for (needle, why) in [
        (
            "kernel cast target unresolved",
            "skip reason must mention `kernel cast target unresolved`",
        ),
        (
            "analyzer hinted Arena with deferred resolve",
            "skip reason must surface the analyzer-hint / runtime-window mismatch",
        ),
    ] {
        assert!(reason.contains(needle), "{why}; got: {reason}");
    }
    assert_eq!(
        cast_annotation.as_deref(),
        Some("cast→kernel"),
        "kernel-arm fall-through must use `cast→kernel` annotation \
         (the path actually taken); got {cast_annotation:?}",
    );
}
#[test]
fn arena_chase_bridge_address_outside_window_is_no_op() {
    // A bridge entry exists for the address, but the address lies below the
    // arena window — the bridge must not fire and no deref is produced.
    let (blob, outer_id, _fwd_id, task_ctx_id) = bridge_btf_outer_fwd_taskctx();
    let btf = Btf::from_bytes(&blob).expect("synthetic BTF parses");
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const OUT_OF_WINDOW: u64 = 0x0F_0000_1000;
    let outer_bytes = OUT_OF_WINDOW.to_le_bytes().to_vec();
    let reader = CastStubReader {
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_type_at: std::collections::HashMap::from([(
            OUT_OF_WINDOW,
            ArenaResolveHit {
                target_type_id: task_ctx_id,
                header_skip: 0,
            },
        )]),
        ..Default::default()
    };
    let v = render_value_with_mem(&btf, outer_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref,
        ref deref_skipped_reason,
        ref cast_annotation,
        ..
    } = members[0].value
    else {
        panic!("data field must render as Ptr; got {:?}", members[0].value);
    };
    assert!(
        deref.is_none(),
        "out-of-window pointer must not chase via the bridge"
    );
    assert!(
        cast_annotation.is_none(),
        "BTF Type::Ptr arm must leave cast_annotation None on the kptr branch"
    );
    // Whether a skip reason is set here is not part of this test's contract.
    let _ = deref_skipped_reason;
}
#[test]
fn cross_btf_fwd_resolve_renders_cgx_body_through_sibling_btf() {
    // Cross-BTF Fwd resolution: the entry BTF only has a forward declaration
    // of `cgx_target`; the sibling BTF carries its full body. With a
    // cross_btf_index entry present, the chase must render the body
    // (marker = 0xCAFE) through the sibling BTF. The test then re-runs with
    // an otherwise-identical reader that lacks the cross-BTF bridge and
    // expects the forward-declaration skip instead.
    use std::sync::Arc;
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    // Entry BTF (positional ids): 1 = u64, 2 = struct outer { cgx_raw },
    // 3 = Fwd cgx_target.
    let mut s_a = vec![0u8];
    let n_a_u64 = push(&mut s_a, "u64");
    let n_a_outer = push(&mut s_a, "outer");
    let n_a_field = push(&mut s_a, "cgx_raw");
    let n_a_cgx = push(&mut s_a, "cgx_target");
    let types_a = vec![
        CastSynType::Int {
            name_off: n_a_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_a_outer,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_a_field,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_a_cgx,
            is_union: false,
        },
    ];
    let blob_a = cast_build_btf(&types_a, &s_a);
    let btf_entry = Btf::from_bytes(&blob_a).expect("entry BTF parses");
    // Sibling BTF (positional ids): 1 = u64, 2 = struct cgx_target { marker }.
    let mut s_b = vec![0u8];
    let n_b_u64 = push(&mut s_b, "u64");
    let n_b_cgx = push(&mut s_b, "cgx_target");
    let n_b_marker = push(&mut s_b, "marker");
    let types_b = vec![
        CastSynType::Int {
            name_off: n_b_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_b_cgx,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_b_marker,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob_b = cast_build_btf(&types_b, &s_b);
    let btf_sibling = Arc::new(Btf::from_bytes(&blob_b).expect("sibling BTF parses"));
    const ARENA_LO: u64 = 0x10_0000_0000;
    const ARENA_HI: u64 = 0x10_0001_0000;
    const TARGET_ADDR: u64 = 0x10_0000_2000;
    let outer_id = 2u32;
    let cgx_fwd_id = 3u32;
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (outer_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: cgx_fwd_id,
            addr_space: AddrSpace::Arena,
        },
    );
    let outer_bytes = TARGET_ADDR.to_le_bytes().to_vec();
    let mut arena_bytes = std::collections::HashMap::new();
    arena_bytes.insert(TARGET_ADDR, 0xCAFEu64.to_le_bytes().to_vec());
    // Cross-BTF bridge entry, keyed by type name — presumably
    // (sibling-BTF index, type id in that BTF, flag); the third field's
    // meaning isn't visible from this test — TODO confirm against
    // CastStubReader's declaration.
    let mut cross_btf_index = std::collections::HashMap::new();
    cross_btf_index.insert("cgx_target".to_string(), (0usize, 2u32, true));
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: arena_bytes,
        cross_btf_btfs: vec![btf_sibling.clone()],
        cross_btf_index,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf_entry, outer_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr { ref deref, .. } = members[0].value else {
        panic!("cgx_raw must render as Ptr; got {:?}", members[0].value);
    };
    let inner = deref
        .as_ref()
        .expect("cross-BTF Fwd resolve must produce a deref (sibling BTF body), but got None");
    let RenderedValue::Struct {
        ref type_name,
        ref members,
    } = **inner
    else {
        panic!("inner must be Struct (cgx_target body); got {inner:?}");
    };
    assert_eq!(
        type_name.as_deref(),
        Some("cgx_target"),
        "rendered subtree must carry the sibling BTF's struct name"
    );
    assert_eq!(members.len(), 1);
    assert_eq!(members[0].name, "marker");
    let RenderedValue::Uint { value: marker, .. } = members[0].value else {
        panic!("marker must render as Uint; got {:?}", members[0].value);
    };
    assert_eq!(
        marker, 0xCAFE,
        "rendered marker must come from the cross-BTF body's bytes"
    );
    // Control run: same cast map, window, and bytes, but no cross-BTF
    // bridge — the Fwd must now refuse to chase.
    let reader_no_bridge = CastStubReader {
        cast_map: Some({
            let mut m = super::super::cast_analysis::CastMap::new();
            m.insert(
                (outer_id, 0),
                CastHit {
                    alloc_size: None,
                    target_type_id: cgx_fwd_id,
                    addr_space: AddrSpace::Arena,
                },
            );
            m
        }),
        arena_window: Some((ARENA_LO, ARENA_HI)),
        arena_bytes_at: {
            let mut a = std::collections::HashMap::new();
            a.insert(TARGET_ADDR, 0xCAFEu64.to_le_bytes().to_vec());
            a
        },
        ..Default::default()
    };
    let v = render_value_with_mem(&btf_entry, outer_id, &outer_bytes, &reader_no_bridge);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!("cgx_raw must render as Ptr; got {:?}", members[0].value);
    };
    assert!(
        deref.is_none(),
        "without cross-BTF bridge, Fwd target must not chase"
    );
    let reason = deref_skipped_reason
        .as_ref()
        .expect("Fwd skip must populate deref_skipped_reason");
    assert!(
        reason.contains("cgx_target") && reason.contains("forward declaration"),
        "skip reason must name the Fwd target: {reason:?}"
    );
}
#[test]
fn cast_chase_kernel_cross_btf_fwd_resolve_succeeds() {
    // Kernel-arm cross-BTF Fwd resolution: the cast targets Fwd `kern_target`
    // in the Kernel address space; the sibling BTF supplies the struct body,
    // and kva_bytes_at supplies the pointee bytes (0xBEEF). The chase must
    // deref into the sibling's kern_target struct.
    use std::sync::Arc;
    let push = |s: &mut Vec<u8>, name: &str| -> u32 {
        let off = s.len() as u32;
        s.extend_from_slice(name.as_bytes());
        s.push(0);
        off
    };
    // Entry BTF (positional ids): 1 = u64, 2 = struct T { f },
    // 3 = Fwd kern_target.
    let mut s_entry = vec![0u8];
    let n_e_u64 = push(&mut s_entry, "u64");
    let n_e_t = push(&mut s_entry, "T");
    let n_e_f = push(&mut s_entry, "f");
    let n_e_kern = push(&mut s_entry, "kern_target");
    let types_entry = vec![
        CastSynType::Int {
            name_off: n_e_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_e_t,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_e_f,
                type_id: 1,
                byte_offset: 0,
            }],
        },
        CastSynType::Fwd {
            name_off: n_e_kern,
            is_union: false,
        },
    ];
    let blob_entry = cast_build_btf(&types_entry, &s_entry);
    let btf_entry = Btf::from_bytes(&blob_entry).expect("entry BTF parses");
    let t_id: u32 = 2;
    let kern_fwd_id: u32 = 3;
    // Sibling BTF (positional ids): 1 = u64, 2 = struct kern_target { marker }.
    let mut s_sib = vec![0u8];
    let n_s_u64 = push(&mut s_sib, "u64");
    let n_s_kern = push(&mut s_sib, "kern_target");
    let n_s_marker = push(&mut s_sib, "marker");
    let types_sib = vec![
        CastSynType::Int {
            name_off: n_s_u64,
            size: 8,
            encoding: 0,
            offset: 0,
            bits: 64,
        },
        CastSynType::Struct {
            name_off: n_s_kern,
            size: 8,
            members: vec![CastSynMember {
                name_off: n_s_marker,
                type_id: 1,
                byte_offset: 0,
            }],
        },
    ];
    let blob_sib = cast_build_btf(&types_sib, &s_sib);
    let btf_sib = Arc::new(Btf::from_bytes(&blob_sib).expect("sibling BTF parses"));
    const KVA: u64 = 0xffff_8000_0001_2000;
    let outer_bytes = KVA.to_le_bytes().to_vec();
    let inner_bytes = 0xBEEFu64.to_le_bytes().to_vec();
    let mut kva_bytes = std::collections::HashMap::new();
    kva_bytes.insert(KVA, inner_bytes);
    // Cross-BTF bridge entry, keyed by type name — presumably
    // (sibling-BTF index, type id in that BTF, flag); the third field's
    // meaning isn't visible from this test — TODO confirm against
    // CastStubReader's declaration.
    let mut cross_btf_index = std::collections::HashMap::new();
    cross_btf_index.insert("kern_target".to_string(), (0usize, 2u32, true));
    // Analyzer hit: byte offset 0 of T casts to the Fwd, in Kernel space.
    let mut cast_map = super::super::cast_analysis::CastMap::new();
    cast_map.insert(
        (t_id, 0),
        CastHit {
            alloc_size: None,
            target_type_id: kern_fwd_id,
            addr_space: AddrSpace::Kernel,
        },
    );
    let reader = CastStubReader {
        cast_map: Some(cast_map),
        kva_bytes_at: kva_bytes,
        cross_btf_btfs: vec![btf_sib.clone()],
        cross_btf_index,
        ..Default::default()
    };
    let v = render_value_with_mem(&btf_entry, t_id, &outer_bytes, &reader);
    let RenderedValue::Struct { ref members, .. } = v else {
        panic!("expected outer Struct render, got {v:?}");
    };
    let RenderedValue::Ptr {
        ref deref,
        ref deref_skipped_reason,
        ..
    } = members[0].value
    else {
        panic!(
            "kernel cast intercept must surface as Ptr; got {:?}",
            members[0].value
        );
    };
    assert!(
        deref_skipped_reason.is_none(),
        "kernel-arm cross-BTF Fwd resolve must succeed; \
         got skip reason {deref_skipped_reason:?}"
    );
    let inner = deref
        .as_ref()
        .expect("kernel-arm cross-BTF Fwd resolve must produce a deref");
    let RenderedValue::Struct {
        type_name: ref inner_name,
        members: ref inner_members,
    } = **inner
    else {
        panic!("deref payload must be the kern_target body Struct; got {inner:?}");
    };
    assert_eq!(
        inner_name.as_deref(),
        Some("kern_target"),
        "rendered subtree must carry the sibling BTF's struct name",
    );
    assert_eq!(inner_members.len(), 1);
    assert_eq!(inner_members[0].name, "marker");
    let RenderedValue::Uint { value: marker, .. } = inner_members[0].value else {
        panic!(
            "kern_target.marker must render as Uint; got {:?}",
            inner_members[0].value
        );
    };
    assert_eq!(
        marker, 0xBEEF,
        "rendered marker must come from the kva-side body bytes \
         decoded against the sibling BTF",
    );
}