//! memscope_rs/analysis/relation_inference/pointer_scan.rs

use crate::analysis::is_virtual_pointer;
use crate::analysis::relation_inference::{RangeMap, Relation, RelationEdge};
use crate::analysis::unsafe_inference::{is_valid_ptr, OwnedMemoryView};

/// Addresses below this (the first page) are never treated as heap pointers.
const MIN_VALID_POINTER: usize = 0x1000;

/// Required alignment for a candidate pointer value (word alignment on 64-bit).
const POINTER_ALIGNMENT: usize = 8;
/// Per-allocation input to relation inference: the allocation's identity,
/// an optional snapshot of its bytes, and metadata from earlier passes.
pub struct InferenceRecord {
    /// Unique allocation id; used as the node id in relation edges.
    pub id: usize,
    /// Base address of the allocation.
    pub ptr: usize,
    /// Allocation size in bytes.
    pub size: usize,
    /// Captured snapshot of the allocation's memory; `None` if unavailable.
    pub memory: Option<OwnedMemoryView>,
    /// Type classification produced by the unsafe-inference pass.
    pub type_kind: crate::analysis::unsafe_inference::TypeKind,
    /// Confidence of the type classification — scale not visible here; TODO confirm.
    pub confidence: u8,
    /// Hash of the allocating call stack, when one was recorded.
    pub call_stack_hash: Option<u64>,
    /// Allocation timestamp (units defined by the tracking layer).
    pub alloc_time: u64,
}
44
45pub fn detect_owner(record: &InferenceRecord, range_map: &RangeMap) -> Vec<RelationEdge> {
60 detect_owner_impl(record, range_map, false)
61}
62
63fn detect_owner_impl(
64 record: &InferenceRecord,
65 range_map: &RangeMap,
66 skip_validation: bool,
67) -> Vec<RelationEdge> {
68 let mut relations = Vec::new();
69 let mut seen_targets = std::collections::HashSet::new();
70
71 let memory = match &record.memory {
72 Some(m) => m,
73 None => return relations,
74 };
75
76 let ptr_size = std::mem::size_of::<usize>();
77 if memory.len() < ptr_size {
78 return relations;
79 }
80
81 for offset in (0..memory.len()).step_by(ptr_size) {
82 if offset + ptr_size > memory.len() {
83 break;
84 }
85
86 let ptr_val = memory.read_usize(offset);
87 let Some(ptr_val) = ptr_val else {
88 continue;
89 };
90
91 if ptr_val == 0 || ptr_val < MIN_VALID_POINTER {
92 continue;
93 }
94
95 if is_virtual_pointer(ptr_val) {
97 continue;
98 }
99
100 if ptr_val % POINTER_ALIGNMENT != 0 {
101 continue;
102 }
103
104 if !skip_validation && !is_valid_ptr(ptr_val) {
106 continue;
107 }
108
109 if let Some(target_id) = range_map.find_containing(ptr_val) {
110 if target_id == record.id {
111 continue;
112 }
113 if seen_targets.insert(target_id) {
114 relations.push(RelationEdge {
115 from: record.id,
116 to: target_id,
117 relation: Relation::Owns,
118 });
119 }
120 }
121 }
122
123 relations
124}
125
#[cfg(test)]
mod tests {
    use super::*;
    use crate::analysis::unsafe_inference::TypeKind;
    use crate::snapshot::types::ActiveAllocation;

    /// Builds an `InferenceRecord` whose snapshot is the given byte vector.
    fn make_record(id: usize, ptr: usize, size: usize, memory: Vec<u8>) -> InferenceRecord {
        InferenceRecord {
            id,
            ptr,
            size,
            memory: Some(OwnedMemoryView::new(memory)),
            type_kind: TypeKind::Unknown,
            confidence: 0,
            call_stack_hash: None,
            alloc_time: 0,
        }
    }

    /// Builds a heap-owner allocation covering [ptr, ptr + size).
    fn make_alloc(ptr: usize, size: usize) -> ActiveAllocation {
        ActiveAllocation {
            ptr: Some(ptr),
            size,
            kind: crate::core::types::TrackKind::HeapOwner { ptr, size },
            allocated_at: 0,
            var_name: None,
            type_name: None,
            thread_id: 0,
            call_stack_hash: None,
        }
    }

    #[test]
    #[cfg(target_os = "macos")]
    fn test_detect_owner_basic() {
        let target_ptr: usize = 0x5000;
        let mut mem = vec![0u8; 24];
        mem[0..8].copy_from_slice(&target_ptr.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![make_alloc(0x1000, 24), make_alloc(0x5000, 1024)];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert_eq!(edges.len(), 1);
        assert_eq!(edges[0].from, 0);
        assert_eq!(edges[0].to, 1);
        assert_eq!(edges[0].relation, Relation::Owns);
    }

    #[test]
    fn test_detect_owner_no_memory() {
        // A record without a snapshot yields no edges.
        let record = InferenceRecord {
            id: 0,
            ptr: 0x1000,
            size: 24,
            memory: None,
            type_kind: TypeKind::Unknown,
            confidence: 0,
            call_stack_hash: None,
            alloc_time: 0,
        };
        let range_map = RangeMap::new(&[]);
        let edges = detect_owner_impl(&record, &range_map, true);
        assert!(edges.is_empty());
    }

    #[test]
    fn test_detect_owner_no_valid_pointers() {
        // All-zero memory contains no candidate pointers.
        let record = make_record(0, 0x1000, 24, vec![0u8; 24]);
        let allocs = vec![make_alloc(0x5000, 100)];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert!(edges.is_empty());
    }

    #[test]
    #[cfg(target_os = "macos")]
    fn test_detect_owner_multiple_pointers() {
        let ptr1: usize = 0x5000;
        let ptr2: usize = 0x6000;
        let mut mem = vec![0u8; 24];
        mem[0..8].copy_from_slice(&ptr1.to_le_bytes());
        mem[8..16].copy_from_slice(&ptr2.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![
            make_alloc(0x1000, 24),
            make_alloc(0x5000, 100),
            make_alloc(0x6000, 100),
        ];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert_eq!(edges.len(), 2);
    }

    #[test]
    fn test_detect_owner_no_self_reference() {
        // A pointer back into the record's own allocation is ignored.
        let self_ptr: usize = 0x1000;
        let mut mem = vec![0u8; 24];
        mem[0..8].copy_from_slice(&self_ptr.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![make_alloc(0x1000, 24)];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert!(edges.is_empty());
    }

    #[test]
    fn test_detect_owner_small_memory() {
        // Memory smaller than one pointer word cannot contain a pointer.
        let record = make_record(0, 0x1000, 4, vec![0u8; 4]);
        let range_map = RangeMap::new(&[]);
        let edges = detect_owner_impl(&record, &range_map, true);
        assert!(edges.is_empty());
    }

    #[test]
    #[cfg(target_os = "macos")]
    fn test_detect_owner_duplicate_pointer_same_target() {
        // Two words pointing at the same target produce a single edge.
        let target_ptr: usize = 0x5000;
        let mut mem = vec![0u8; 24];
        mem[0..8].copy_from_slice(&target_ptr.to_le_bytes());
        mem[8..16].copy_from_slice(&target_ptr.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![make_alloc(0x1000, 24), make_alloc(0x5000, 100)];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert_eq!(edges.len(), 1);
        assert_eq!(edges[0].to, 1);
        assert_eq!(edges[0].from, 0);
    }

    #[test]
    fn test_detect_owner_unaligned_pointer_rejected() {
        let mut mem = vec![0u8; 24];
        let unaligned_ptr: usize = 0x5003;
        mem[0..8].copy_from_slice(&unaligned_ptr.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![make_alloc(0x1000, 24), make_alloc(0x5000, 100)];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert!(edges.is_empty(), "Unaligned pointer should be rejected");
    }

    #[test]
    fn test_detect_owner_pointer_to_gap_rejected() {
        // 0x5500 lies between the two allocations below.
        let gap_ptr: usize = 0x5500;
        let mut mem = vec![0u8; 24];
        mem[0..8].copy_from_slice(&gap_ptr.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![
            make_alloc(0x1000, 24),
            make_alloc(0x5000, 100),
            make_alloc(0x6000, 100),
        ];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert!(
            edges.is_empty(),
            "Pointer to gap should not match any allocation"
        );
    }

    #[test]
    #[cfg(target_os = "macos")]
    fn test_detect_owner_multiple_different_targets() {
        let ptr1: usize = 0x5000;
        let ptr2: usize = 0x6000;
        let ptr3: usize = 0x7000;
        let mut mem = vec![0u8; 32];
        mem[0..8].copy_from_slice(&ptr1.to_le_bytes());
        mem[8..16].copy_from_slice(&ptr2.to_le_bytes());
        mem[16..24].copy_from_slice(&ptr3.to_le_bytes());

        let record = make_record(0, 0x1000, 32, mem);
        let allocs = vec![
            make_alloc(0x1000, 32),
            make_alloc(0x5000, 100),
            make_alloc(0x6000, 100),
            make_alloc(0x7000, 100),
        ];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert_eq!(edges.len(), 3);
        let targets: Vec<_> = edges.iter().map(|e| e.to).collect();
        assert!(targets.contains(&1));
        assert!(targets.contains(&2));
        assert!(targets.contains(&3));
    }

    #[test]
    fn test_detect_owner_null_pointer_skipped() {
        // First word is null; only the second (valid) word yields an edge.
        let mut mem = vec![0u8; 24];
        let valid_ptr: usize = 0x5000;
        mem[8..16].copy_from_slice(&valid_ptr.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![make_alloc(0x1000, 24), make_alloc(0x5000, 100)];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert_eq!(edges.len(), 1);
        assert_eq!(edges[0].to, 1);
    }

    #[test]
    fn test_detect_owner_low_address_skipped() {
        // 0x100 is below MIN_VALID_POINTER even though it has an allocation.
        let mut mem = vec![0u8; 24];
        let low_ptr: usize = 0x100;
        mem[0..8].copy_from_slice(&low_ptr.to_le_bytes());

        let record = make_record(0, 0x1000, 24, mem);
        let allocs = vec![make_alloc(0x100, 100), make_alloc(0x1000, 24)];
        let range_map = RangeMap::new(&allocs);

        let edges = detect_owner_impl(&record, &range_map, true);
        assert!(edges.is_empty(), "Low address pointer should be skipped");
    }
}