workflow_perf_monitor/mem/
allocation_counter.rs

1use std::{
2    alloc::{GlobalAlloc, Layout, System},
3    sync::atomic::{AtomicBool, AtomicIsize, Ordering},
4};
5
6pub const MIN_ALIGN: usize = 16; // module `sys_common` is private. https://doc.rust-lang.org/src/std/sys_common/alloc.rs.html#28
7
8static ALLOCATED: AtomicIsize = AtomicIsize::new(0);
9static ENABLE: AtomicBool = AtomicBool::new(false);
10
11/// An allocator tracks inuse allocated bytes.
12///
13/// The counter is disable by default. Please enable it by `CountingAllocator::enable()` then call `CountingAllocator::get_allocated()` will return the bytes inused.
14pub struct CountingAllocator;
15
16impl CountingAllocator {
17    /// Get the inuse bytes allocated by rust.
18    pub fn get_allocated() -> isize {
19        ALLOCATED.load(Ordering::SeqCst)
20    }
21
22    /// Check whether the counter is enable.
23    pub fn is_enable() -> bool {
24        ENABLE.load(Ordering::SeqCst)
25    }
26
27    /// Reset the counter.
28    pub fn reset() {
29        ALLOCATED.store(0, Ordering::SeqCst)
30    }
31
32    /// Enable the counter.
33    pub fn enable() {
34        ENABLE.store(true, Ordering::SeqCst)
35    }
36
37    /// Disable the counter.
38    pub fn disable() {
39        ENABLE.store(false, Ordering::SeqCst)
40    }
41}
42
43unsafe impl GlobalAlloc for CountingAllocator {
44    #[inline]
45    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
46        let ret = System.alloc(layout);
47        if !ret.is_null() && Self::is_enable() {
48            ALLOCATED.fetch_add(layout.size() as isize, Ordering::SeqCst);
49        }
50        ret
51    }
52
53    #[inline]
54    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
55        System.dealloc(ptr, layout);
56        if Self::is_enable() {
57            ALLOCATED.fetch_sub(layout.size() as isize, Ordering::SeqCst);
58        }
59    }
60
61    #[inline]
62    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
63        let ret: *mut u8 = System.realloc(ptr, layout, new_size);
64        if !ret.is_null()
65            && Self::is_enable()
66            && layout.align() <= MIN_ALIGN
67            && layout.align() <= new_size
68        {
69            ALLOCATED.fetch_add(new_size as isize - layout.size() as isize, Ordering::SeqCst);
70        }
71        ret
72    }
73
74    #[inline]
75    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
76        let ret = System.alloc_zeroed(layout);
77        if !ret.is_null() && Self::is_enable() {
78            ALLOCATED.fetch_add(layout.size() as isize, Ordering::SeqCst);
79        }
80        ret
81    }
82}
83#[cfg(feature = "allocation_counter")]
84#[global_allocator]
85static _COUNTER: perf_monitor::mem::CountingAllocator = perf_monitor::mem::CountingAllocator;