//! An instrumenting middleware for global allocators: [`PeakMemAlloc`] wraps
//! any [`GlobalAlloc`] and records the peak number of bytes that were
//! simultaneously allocated, which is useful for measuring the memory high
//! water mark of a section of code.

#![deny(
    missing_debug_implementations,
    missing_copy_implementations,
    trivial_casts,
    trivial_numeric_casts,
    unused_import_braces,
    unused_imports,
    unused_qualifications,
    missing_docs
)]
#![cfg_attr(doc_cfg, feature(allocator_api))]
#![cfg_attr(doc_cfg, feature(doc_cfg))]

use std::{
    alloc::{GlobalAlloc, Layout, System},
    sync::atomic::{AtomicIsize, AtomicUsize, Ordering},
};

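/// Peak-memory bookkeeping of [`PeakMemAlloc`], independent of the wrapped
/// allocator type.
///
/// Because the trait is object safe, instrumented allocators can be passed
/// around type-erased. A minimal sketch, assuming this crate is named
/// `peakmem_alloc` (the fence is `ignore`d because the crate name may differ):
///
/// ```ignore
/// use peakmem_alloc::PeakMemAllocTrait;
///
/// fn report_peak(alloc: &dyn PeakMemAllocTrait) {
///     println!("peak memory: {} bytes", alloc.get_peak_memory());
/// }
/// ```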
pub trait PeakMemAllocTrait {
    /// Resets the recorded peak to zero, so that a new section of code can
    /// be measured independently of earlier allocations.
    fn reset_peak_memory(&self);

    /// Returns the peak number of bytes that were simultaneously allocated
    /// since construction or the last `reset_peak_memory` call.
    fn get_peak_memory(&self) -> usize;
}

/// An instrumenting middleware which tracks the peak number of bytes
/// allocated through the wrapped [`GlobalAlloc`].
#[derive(Default, Debug)]
pub struct PeakMemAlloc<T: GlobalAlloc> {
    /// Number of currently live bytes; can go negative when memory that was
    /// not allocated through this allocator is freed through it.
    peak_bytes_allocated_tracker: AtomicIsize,
    /// Highest value the live-byte tracker has reached so far.
    peak_bytes_allocated: AtomicUsize,
    inner: T,
}

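/// An instrumented version of the [`System`] allocator, ready to be
/// registered as the global allocator via the reference impl further below.
///
/// A minimal usage sketch, assuming this crate is named `peakmem_alloc`
/// (the fence is `ignore`d because the crate name may differ):
///
/// ```ignore
/// use peakmem_alloc::{PeakMemAlloc, PeakMemAllocTrait, INSTRUMENTED_SYSTEM};
/// use std::alloc::System;
///
/// #[global_allocator]
/// static GLOBAL: &PeakMemAlloc<System> = &INSTRUMENTED_SYSTEM;
///
/// fn main() {
///     GLOBAL.reset_peak_memory();
///     let v = vec![0u8; 64 * 1024];
///     println!("peak memory: {} bytes", GLOBAL.get_peak_memory());
///     drop(v);
/// }
/// ```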
pub static INSTRUMENTED_SYSTEM: PeakMemAlloc<System> = PeakMemAlloc {
    peak_bytes_allocated_tracker: AtomicIsize::new(0),
    peak_bytes_allocated: AtomicUsize::new(0),
    inner: System,
};

impl PeakMemAlloc<System> {
    /// Creates a new instrumented wrapper around the [`System`] allocator.
    pub const fn system() -> Self {
        PeakMemAlloc {
            peak_bytes_allocated_tracker: AtomicIsize::new(0),
            peak_bytes_allocated: AtomicUsize::new(0),
            inner: System,
        }
    }
}

impl<T: GlobalAlloc> PeakMemAllocTrait for PeakMemAlloc<T> {
    #[inline]
    fn reset_peak_memory(&self) {
        self.peak_bytes_allocated.store(0, Ordering::SeqCst);
        self.peak_bytes_allocated_tracker.store(0, Ordering::SeqCst);
    }

    #[inline]
    fn get_peak_memory(&self) -> usize {
        self.peak_bytes_allocated.load(Ordering::SeqCst)
    }
}

impl<T: GlobalAlloc> PeakMemAlloc<T> {
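    /// Creates a new `PeakMemAlloc` instrumenting an arbitrary
    /// [`GlobalAlloc`] implementation.
    ///
    /// A minimal sketch, assuming this crate is named `peakmem_alloc` (the
    /// fence is `ignore`d because the crate name may differ); [`System`] is
    /// used here, but any `GlobalAlloc` works:
    ///
    /// ```ignore
    /// use peakmem_alloc::{PeakMemAlloc, PeakMemAllocTrait};
    /// use std::alloc::System;
    ///
    /// static PEAK_ALLOC: PeakMemAlloc<System> = PeakMemAlloc::new(System);
    ///
    /// assert_eq!(PEAK_ALLOC.get_peak_memory(), 0);
    /// ```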
    pub const fn new(inner: T) -> Self {
        PeakMemAlloc {
            peak_bytes_allocated_tracker: AtomicIsize::new(0),
            peak_bytes_allocated: AtomicUsize::new(0),
            inner,
        }
    }

    /// Records an allocation of `bytes`, updating the peak if the number of
    /// currently live bytes now exceeds the previous maximum.
    #[inline]
    fn track_alloc(&self, bytes: usize) {
        // `fetch_add` returns the value *before* the addition, so the current
        // number of live bytes is `prev + bytes`.
        let prev = self
            .peak_bytes_allocated_tracker
            .fetch_add(bytes as isize, Ordering::SeqCst);
        // Clamp at zero (the tracker can go negative if memory that was not
        // allocated through this allocator is freed through it), then fold
        // the result into the recorded peak.
        let current_peak = (prev + bytes as isize).max(0) as usize;
        self.peak_bytes_allocated
            .fetch_max(current_peak, Ordering::SeqCst);
    }

    /// Records a deallocation of `bytes` by decrementing the live-byte
    /// tracker; the recorded peak is deliberately left untouched.
    #[inline]
    fn track_dealloc(&self, bytes: usize) {
        self.peak_bytes_allocated_tracker
            .fetch_sub(bytes as isize, Ordering::SeqCst);
    }
}

// Delegating impl: makes `&'static PeakMemAlloc<T>` itself a `GlobalAlloc`,
// so a reference to a static such as `INSTRUMENTED_SYSTEM` can be registered
// with `#[global_allocator]`.
unsafe impl<'a, T: GlobalAlloc + 'a> GlobalAlloc for &'a PeakMemAlloc<T> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        (*self).alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        (*self).dealloc(ptr, layout)
    }

    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        (*self).alloc_zeroed(layout)
    }

    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        (*self).realloc(ptr, layout, new_size)
    }
}

unsafe impl<T: GlobalAlloc> GlobalAlloc for PeakMemAlloc<T> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.track_alloc(layout.size());
        self.inner.alloc(layout)
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.track_dealloc(layout.size());
        self.inner.dealloc(ptr, layout)
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        self.track_alloc(layout.size());
        self.inner.alloc_zeroed(layout)
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        // Only the size delta is tracked. This is a slight approximation:
        // during a growing realloc the old and new blocks may coexist
        // briefly, which the peak does not account for.
        match new_size.cmp(&layout.size()) {
            std::cmp::Ordering::Greater => {
                let difference = new_size - layout.size();
                self.track_alloc(difference);
            }
            std::cmp::Ordering::Less => {
                let difference = layout.size() - new_size;
                self.track_dealloc(difference);
            }
            _ => {}
        }

        self.inner.realloc(ptr, layout, new_size)
    }
}
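
// Minimal sanity tests: a sketch (added for illustration) that exercises the
// tracking logic directly through the `GlobalAlloc` API.
#[cfg(test)]
mod tests {
    use super::*;
    use std::alloc::{GlobalAlloc, Layout, System};

    #[test]
    fn tracks_peak_across_alloc_and_dealloc() {
        let alloc = PeakMemAlloc::new(System);
        let layout = Layout::from_size_align(1024, 8).unwrap();
        unsafe {
            let a = alloc.alloc(layout);
            let b = alloc.alloc(layout);
            assert!(!a.is_null() && !b.is_null());
            // Two live 1 KiB blocks => the peak must be at least 2 KiB.
            assert!(alloc.get_peak_memory() >= 2048);
            alloc.dealloc(a, layout);
            alloc.dealloc(b, layout);
        }
        // Freeing memory does not lower the recorded peak ...
        assert!(alloc.get_peak_memory() >= 2048);
        // ... but an explicit reset does.
        alloc.reset_peak_memory();
        assert_eq!(alloc.get_peak_memory(), 0);
    }

    #[test]
    fn realloc_tracks_only_the_size_delta() {
        let alloc = PeakMemAlloc::new(System);
        let layout = Layout::from_size_align(512, 8).unwrap();
        unsafe {
            let p = alloc.alloc(layout);
            assert!(!p.is_null());
            // Growing 512 -> 2048 bytes adds a delta of 1536 live bytes.
            let p = alloc.realloc(p, layout, 2048);
            assert!(!p.is_null());
            assert!(alloc.get_peak_memory() >= 2048);
            alloc.dealloc(p, Layout::from_size_align(2048, 8).unwrap());
        }
    }
}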