// memscope-rs 0.2.3
//
// A memory tracking library for Rust applications.
// See the crate-level documentation for usage details.
//! Custom global allocator for tracking memory allocations.

use std::alloc::{GlobalAlloc, Layout, System};

/// A custom allocator that tracks memory allocations and deallocations.
///
/// This allocator wraps the system allocator and records all allocation
/// and deallocation events through the global memory tracker.
pub struct TrackingAllocator;

impl TrackingAllocator {
    /// Create a new tracking allocator instance.
    ///
    /// `const` so it can be used in a `#[global_allocator]` static.
    pub const fn new() -> Self {
        Self
    }

    /// Best-effort mapping from an allocation size to a likely Rust type name.
    ///
    /// Returns `&'static str` so calling this from allocator code can never
    /// allocate (and therefore can never recurse back into the allocator).
    /// Sizes that match no known type return `"unknown"`.
    fn _infer_type_from_allocation_context(size: usize) -> &'static str {
        match size {
            // Common primitive sizes.
            1 => "u8",
            2 => "u16",
            4 => "u32",
            8 => "u64",
            16 => "u128",

            // Common container header sizes (on 64-bit targets).
            24 => "String",
            32 => "Vec<T>",
            48 => "HashMap<K,V>",

            // FIX: guard arms comparing `size` against
            // `size_of::<Arc<String>>()`, `size_of::<Rc<String>>()` and
            // `size_of::<Box<String>>()` used to live here, but smart pointers
            // are pointer-sized (1/2/4/8/16 bytes depending on target), so
            // those guards were always shadowed by the literal arms above and
            // could never match. Removed as unreachable dead code; behavior is
            // unchanged.

            // Anything else: the size alone tells us nothing.
            _ => "unknown",
        }
    }

    /// Get a simplified call stack for context.
    ///
    /// Currently a fixed placeholder; a real implementation could use the
    /// `backtrace` crate. Note this DOES allocate, so it must only be called
    /// while tracking is disabled.
    fn _get_simplified_call_stack() -> Vec<String> {
        vec!["global_allocator".to_string(), "system_alloc".to_string()]
    }

    /// Best-effort mapping from an allocation size to a variable-category
    /// label, using static strings to avoid recursive allocations.
    ///
    /// Note: size 0 falls through to the catch-all arm and is reported as
    /// `"buffer_data"` (the crate's tests rely on this).
    fn _infer_variable_from_allocation_context(size: usize) -> &'static str {
        match size {
            // Small allocations - likely primitives.
            1..=8 => "primitive_data",

            // Medium allocations - likely structs or small collections.
            9..=64 => "struct_data",

            // Large allocations - likely collections or buffers.
            65..=1024 => "collection_data",

            // Very large (or zero-sized) allocations - buffers / large data.
            _ => "buffer_data",
        }
    }
}

// Thread-local flag to prevent recursive tracking.
//
// The allocator hooks below call into the tracker, and tracker code may itself
// allocate; setting this flag for the duration of a tracking call breaks that
// cycle. The `const { ... }` initializer avoids lazy-init work on first access.
thread_local! {
    static TRACKING_DISABLED: std::cell::Cell<bool> = const { std::cell::Cell::new(false) };
}

unsafe impl GlobalAlloc for TrackingAllocator {
    /// Allocate via the system allocator, then record the allocation with the
    /// global tracker (unless tracking is disabled on this thread).
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // Allocate memory first so that a tracking problem can never cause an
        // allocation failure — the caller always gets the system result.
        let ptr = System.alloc(layout);

        // Track the allocation if it succeeded and tracking is not disabled
        if !ptr.is_null() {
            // Check if tracking is disabled for this thread to prevent recursion
            let should_track = TRACKING_DISABLED.with(|disabled| !disabled.get());

            if should_track {
                // Temporarily disable tracking to prevent recursion during tracking operations
                TRACKING_DISABLED.with(|disabled| disabled.set(true));

                // catch_unwind guards the (possibly lazy-initializing) tracker
                // lookup so a panic there cannot unwind out of the allocator.
                // NOTE(review): only `get_tracker` is guarded — if
                // `track_allocation` itself panicked, the unwind would skip the
                // re-enable below and leave tracking disabled on this thread.
                // Presumably `track_allocation` is panic-free; confirm.
                if let Ok(tracker) = std::panic::catch_unwind(crate::core::tracker::get_tracker) {
                    // Simple tracking without context to prevent recursive allocations
                    let _ = tracker.track_allocation(ptr as usize, layout.size());
                }

                // Re-enable tracking
                TRACKING_DISABLED.with(|disabled| disabled.set(false));
            }
        }

        ptr
    }

    /// Record the deallocation with the global tracker (unless tracking is
    /// disabled on this thread), then free via the system allocator.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // Track the deallocation first, while `ptr` is still a live allocation
        // from the tracker's point of view.
        let should_track = TRACKING_DISABLED.with(|disabled| !disabled.get());

        if should_track {
            // Temporarily disable tracking to prevent recursion
            TRACKING_DISABLED.with(|disabled| disabled.set(true));

            // Track the deallocation - use try_lock approach to avoid deadlocks
            if let Ok(tracker) = std::panic::catch_unwind(crate::core::tracker::get_tracker) {
                // Ignore errors to prevent deallocation failures from breaking the program
                let _ = tracker.track_deallocation(ptr as usize);
            }

            // Re-enable tracking
            TRACKING_DISABLED.with(|disabled| disabled.set(false));
        }

        // Deallocate the memory (always, even when tracking was skipped).
        System.dealloc(ptr, layout);
    }
}

impl Default for TrackingAllocator {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::alloc::{GlobalAlloc, Layout};
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Once;

    /// Reset the thread-local recursion guard so tests are independent of
    /// whatever ran on this thread before.
    fn reset_thread_local_state() {
        TRACKING_DISABLED.with(|disabled| disabled.set(false));
    }

    /// Basic alloc/dealloc round trip through the tracking allocator.
    #[test]
    fn test_allocation_tracking() {
        let allocator = TrackingAllocator::new();
        let layout = Layout::from_size_align(1024, 8).unwrap();

        unsafe {
            let ptr = allocator.alloc(layout);
            assert!(!ptr.is_null());

            // Test deallocation
            allocator.dealloc(ptr, layout);
        }
    }

    /// Zero-sized layouts are constructible, but must NOT be passed to
    /// `alloc`/`dealloc`.
    #[test]
    fn test_zero_sized_allocation() {
        // FIX: the previous version called `alloc` with a zero-sized layout,
        // which is undefined behavior per the `GlobalAlloc` contract
        // ("undefined behavior can result ... if layout has size zero"), and
        // then passed the possibly-null pointer to `dealloc` — also UB.
        // We still verify a zero-sized `Layout` can be constructed, but only
        // perform the actual round trip with the smallest *legal* layout.
        let zero_layout = Layout::from_size_align(0, 1).unwrap();
        assert_eq!(zero_layout.size(), 0);

        let allocator = TrackingAllocator::new();
        let layout = Layout::from_size_align(1, 1).unwrap();
        unsafe {
            let ptr = allocator.alloc(layout);
            assert!(!ptr.is_null());
            allocator.dealloc(ptr, layout);
        }
    }

    /// A large (1 MB) allocation; deallocation only attempted on success.
    #[test]
    fn test_large_allocation() {
        let allocator = TrackingAllocator::new();
        let layout = Layout::from_size_align(1024 * 1024, 8).unwrap(); // 1MB

        unsafe {
            let ptr = allocator.alloc(layout);
            if !ptr.is_null() {
                // Only test deallocation if allocation succeeded
                allocator.dealloc(ptr, layout);
            }
        }
    }

    /// Multiple live allocations of varying sizes, then free them all.
    #[test]
    fn test_multiple_allocations() {
        let allocator = TrackingAllocator::new();
        let mut ptrs = Vec::new();

        // Allocate multiple blocks
        for i in 1..=10 {
            let layout = Layout::from_size_align(i * 64, 8).unwrap();
            unsafe {
                let ptr = allocator.alloc(layout);
                if !ptr.is_null() {
                    ptrs.push((ptr, layout));
                }
            }
        }

        // Deallocate all blocks
        for (ptr, layout) in ptrs {
            unsafe {
                allocator.dealloc(ptr, layout);
            }
        }
    }

    /// Spot-check the static size→type mapping.
    #[test]
    fn test_type_inference_from_size() {
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(1),
            "u8"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(4),
            "u32"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(8),
            "u64"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(24),
            "String"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(32),
            "Vec<T>"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(999),
            "unknown"
        );
    }

    /// Spot-check the static size→variable-category mapping.
    #[test]
    fn test_variable_inference_from_size() {
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(4),
            "primitive_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(32),
            "struct_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(512),
            "collection_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(2048),
            "buffer_data"
        );
    }

    /// `TrackingAllocator` is a zero-sized unit struct.
    #[test]
    fn test_default_implementation() {
        let allocator = TrackingAllocator::new();
        assert_eq!(
            std::mem::size_of_val(&allocator),
            std::mem::size_of::<TrackingAllocator>()
        );
    }

    /// Exhaustive check of every literal arm in the type-inference table.
    #[test]
    fn test_type_inference() {
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(1),
            "u8"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(2),
            "u16"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(4),
            "u32"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(8),
            "u64"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(16),
            "u128"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(24),
            "String"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(32),
            "Vec<T>"
        );
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(48),
            "HashMap<K,V>"
        );

        // Test unknown size
        assert_eq!(
            TrackingAllocator::_infer_type_from_allocation_context(12345),
            "unknown"
        );
    }

    /// Boundary values of every range arm in the variable-inference table
    /// (note: 0 falls through to the catch-all and reports "buffer_data").
    #[test]
    fn test_variable_inference() {
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(0),
            "buffer_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(4),
            "primitive_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(8),
            "primitive_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(16),
            "struct_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(32),
            "struct_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(64),
            "struct_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(65),
            "collection_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(128),
            "collection_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(1024),
            "collection_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(1025),
            "buffer_data"
        );
        assert_eq!(
            TrackingAllocator::_infer_variable_from_allocation_context(usize::MAX),
            "buffer_data"
        );
    }

    /// The recursion-guard flag defaults to "tracking enabled" and toggles.
    #[test]
    fn test_thread_local_tracking() {
        reset_thread_local_state();

        // Tracking is enabled by default
        TRACKING_DISABLED.with(|disabled| {
            assert!(!disabled.get());
        });

        // Disabling and re-enabling round-trips
        TRACKING_DISABLED.with(|disabled| {
            disabled.set(true);
            assert!(disabled.get());
            disabled.set(false);
        });
    }

    /// The placeholder call stack has a fixed, known shape.
    #[test]
    fn test_simplified_call_stack() {
        let stack = TrackingAllocator::_get_simplified_call_stack();
        assert_eq!(stack.len(), 2);
        assert_eq!(stack[0], "global_allocator");
        assert_eq!(stack[1], "system_alloc");
    }

    /// High-alignment and minimum-size layouts round-trip correctly.
    #[test]
    fn test_allocation_edge_cases() {
        let allocator = TrackingAllocator::new();

        // Test with maximum alignment
        let max_align = std::mem::size_of::<usize>() * 2;
        let layout = Layout::from_size_align(16, max_align).unwrap();

        unsafe {
            let ptr = allocator.alloc(layout);
            if !ptr.is_null() {
                // Returned pointer must honor the requested alignment
                assert_eq!((ptr as usize) % max_align, 0);
                allocator.dealloc(ptr, layout);
            }
        }

        // Test with minimal size but non-zero
        let layout = Layout::from_size_align(1, 1).unwrap();
        unsafe {
            let ptr = allocator.alloc(layout);
            if !ptr.is_null() {
                allocator.dealloc(ptr, layout);
            }
        }
    }

    /// Verify the thread-local guard prevents unbounded recursion when the
    /// tracker itself allocates during a tracking call.
    #[test]
    fn test_recursive_allocation_handling() {
        let allocator = TrackingAllocator::new();
        let layout = Layout::from_size_align(64, 8).unwrap();

        // Set up a flag to detect if we're in a recursive call
        static RECURSION_DETECTED: AtomicBool = AtomicBool::new(false);
        static INIT: Once = Once::new();

        INIT.call_once(|| {
            // Install a panic hook to detect if we hit a stack overflow
            let original_hook = std::panic::take_hook();
            std::panic::set_hook(Box::new(move |panic_info| {
                if let Some(s) = panic_info.payload().downcast_ref::<&str>() {
                    if s.contains("stack overflow") {
                        RECURSION_DETECTED.store(true, Ordering::SeqCst);
                    }
                }
                original_hook(panic_info);
            }));
        });

        // This allocation will trigger tracking, but the thread-local flag should prevent recursion
        unsafe {
            let ptr = allocator.alloc(layout);
            if !ptr.is_null() {
                allocator.dealloc(ptr, layout);
            }
        }

        // Verify we didn't hit a stack overflow
        assert!(
            !RECURSION_DETECTED.load(Ordering::SeqCst),
            "Recursive allocation detected - thread-local tracking failed"
        );
    }
}