laminar_core/alloc/
//! Hot-path allocation detection.
//!
//! Wraps the system allocator so heap allocations can be counted and
//! attributed to named hot-path sections, and so an allocation inside a hot
//! path can panic with a diagnostic. Tracking only occurs when the
//! `allocation-tracking` feature is enabled and `HotPathDetectingAlloc` is
//! installed as the global allocator.

#[cfg(feature = "allocation-tracking")]
use std::alloc::{GlobalAlloc, Layout, System};
#[cfg(feature = "allocation-tracking")]
use std::cell::Cell;
use std::sync::atomic::{AtomicU64, Ordering};

/// A point-in-time snapshot of the global allocation counters.
#[derive(Debug, Default, Clone, Copy)]
pub struct AllocationStats {
    /// Allocations observed while a hot-path section was active.
    pub hot_path_allocations: u64,
    /// Total bytes requested by allocations in hot-path sections.
    pub hot_path_bytes: u64,
    /// Allocations observed outside any hot-path section.
    pub normal_allocations: u64,
    /// Total bytes requested by allocations outside hot-path sections.
    pub normal_bytes: u64,
}

// Process-wide counters. Relaxed ordering suffices: each counter is
// independent telemetry with no ordering relationship to other memory.
static HOT_PATH_ALLOC_COUNT: AtomicU64 = AtomicU64::new(0);
static HOT_PATH_ALLOC_BYTES: AtomicU64 = AtomicU64::new(0);
static NORMAL_ALLOC_COUNT: AtomicU64 = AtomicU64::new(0);
static NORMAL_ALLOC_BYTES: AtomicU64 = AtomicU64::new(0);

impl AllocationStats {
    /// Returns a snapshot of the counters accumulated so far.
    #[must_use]
    pub fn current() -> Self {
        Self {
            hot_path_allocations: HOT_PATH_ALLOC_COUNT.load(Ordering::Relaxed),
            hot_path_bytes: HOT_PATH_ALLOC_BYTES.load(Ordering::Relaxed),
            normal_allocations: NORMAL_ALLOC_COUNT.load(Ordering::Relaxed),
            normal_bytes: NORMAL_ALLOC_BYTES.load(Ordering::Relaxed),
        }
    }

    /// Resets all counters to zero, e.g. between benchmark iterations.
    pub fn reset() {
        HOT_PATH_ALLOC_COUNT.store(0, Ordering::Relaxed);
        HOT_PATH_ALLOC_BYTES.store(0, Ordering::Relaxed);
        NORMAL_ALLOC_COUNT.store(0, Ordering::Relaxed);
        NORMAL_ALLOC_BYTES.store(0, Ordering::Relaxed);
    }
}

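// A typical measurement flow, as a sketch: `run_workload()` stands in for
// hypothetical code under test, and the counters only move when
// `HotPathDetectingAlloc` (defined below) is installed as the global
// allocator.
//
//     AllocationStats::reset();
//     run_workload();
//     let stats = AllocationStats::current();
//     assert_eq!(stats.hot_path_allocations, 0, "hot path must not allocate");
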
// These cells are consulted from inside the global allocator, so they use
// `const`-initialized TLS: a lazily-initialized thread-local could itself
// allocate on first access and recurse into the allocator.
#[cfg(feature = "allocation-tracking")]
thread_local! {
    /// Nesting depth of hot-path sections on this thread (0 = not in one).
    static HOT_PATH_DEPTH: Cell<usize> = const { Cell::new(0) };

    /// Name of the outermost active hot-path section, for diagnostics.
    static HOT_PATH_SECTION: Cell<Option<&'static str>> = const { Cell::new(None) };

    /// Whether an allocation inside a hot path should panic (vs. just count).
    static PANIC_ON_ALLOC: Cell<bool> = const { Cell::new(true) };
}

/// Enters a hot-path section on the current thread.
///
/// Sections nest: each call increments a per-thread depth counter, and only
/// the outermost call records `section` as the active section name. Every
/// call must be balanced by a matching [`disable_hot_path`].
#[cfg(feature = "allocation-tracking")]
pub(crate) fn enable_hot_path(section: &'static str) {
    HOT_PATH_DEPTH.with(|d| {
        let depth = d.get();
        d.set(depth + 1);
        if depth == 0 {
            // Entering the outermost section: record its name for diagnostics.
            HOT_PATH_SECTION.with(|s| s.set(Some(section)));
        }
    });
}

/// Leaves a hot-path section on the current thread.
///
/// Decrements the nesting depth; leaving the outermost section clears the
/// recorded section name. Unbalanced calls (depth already zero) are ignored.
#[cfg(feature = "allocation-tracking")]
pub(crate) fn disable_hot_path() {
    HOT_PATH_DEPTH.with(|d| {
        let depth = d.get();
        if depth > 0 {
            d.set(depth - 1);
            if depth == 1 {
                // Leaving the outermost section: clear the diagnostic name.
                HOT_PATH_SECTION.with(|s| s.set(None));
            }
        }
    });
}

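// `enable_hot_path`/`disable_hot_path` must stay balanced even across early
// returns and `?`. A hypothetical RAII guard (a sketch, not defined in this
// module) could enforce the pairing automatically:
//
//     pub(crate) struct HotPathGuard;
//
//     impl HotPathGuard {
//         pub(crate) fn enter(section: &'static str) -> Self {
//             enable_hot_path(section);
//             HotPathGuard
//         }
//     }
//
//     impl Drop for HotPathGuard {
//         fn drop(&mut self) {
//             disable_hot_path();
//         }
//     }
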
/// Returns `true` if the current thread is inside any hot-path section.
#[cfg(feature = "allocation-tracking")]
#[must_use]
pub fn is_hot_path_enabled() -> bool {
    HOT_PATH_DEPTH.with(|d| d.get() > 0)
}

/// Sets whether an allocation inside a hot path panics (`true`, the default)
/// or is merely counted (`false`) on the current thread. Disabling the panic
/// is useful when a test wants to measure allocation counts instead.
#[cfg(feature = "allocation-tracking")]
pub fn set_panic_on_alloc(panic: bool) {
    PANIC_ON_ALLOC.with(|p| p.set(panic));
}

/// Records one allocation attempt of `size` bytes for operation `op`,
/// panicking with a diagnostic if the current thread is inside a hot-path
/// section and panicking is enabled.
#[cfg(feature = "allocation-tracking")]
#[cold]
#[inline(never)]
fn check_hot_path_allocation(op: &str, size: usize) {
    let is_hot = HOT_PATH_DEPTH.with(|d| d.get() > 0);

    if is_hot {
        HOT_PATH_ALLOC_COUNT.fetch_add(1, Ordering::Relaxed);
        HOT_PATH_ALLOC_BYTES.fetch_add(size as u64, Ordering::Relaxed);

        let should_panic = PANIC_ON_ALLOC.with(Cell::get);
        if should_panic {
            // Disarm first: formatting the panic message itself allocates,
            // and a still-armed flag would re-enter this check and turn the
            // panic into a double-panic abort before anything is printed.
            PANIC_ON_ALLOC.with(|p| p.set(false));
            let section = HOT_PATH_SECTION.with(|s| s.get().unwrap_or("unknown"));
            panic!(
                "\n\
                ╔════════════════════════════════════════════════════════════════╗\n\
                ║                ALLOCATION IN HOT PATH DETECTED!                ║\n\
                ╠════════════════════════════════════════════════════════════════╣\n\
                ║ Operation: {op:<50}  ║\n\
                ║ Size:      {size:<50}  ║\n\
                ║ Section:   {section:<50}  ║\n\
                ╠════════════════════════════════════════════════════════════════╣\n\
                ║ Hot path code must be zero-allocation.                         ║\n\
                ║ Use pre-allocated buffers, ArrayVec, or ObjectPool instead.    ║\n\
                ╚════════════════════════════════════════════════════════════════╝\n",
                op = op,
                size = format!("{size} bytes"),
                section = section
            );
        }
    } else {
        NORMAL_ALLOC_COUNT.fetch_add(1, Ordering::Relaxed);
        NORMAL_ALLOC_BYTES.fetch_add(size as u64, Ordering::Relaxed);
    }
}

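/// A wrapper around the [`System`] allocator that routes every allocation
/// request through `check_hot_path_allocation`.
///
/// A minimal installation sketch; the crate path is assumed from this file's
/// location and is not confirmed by the module itself:
///
/// ```ignore
/// use laminar_core::alloc::detector::HotPathDetectingAlloc;
///
/// #[global_allocator]
/// static GLOBAL: HotPathDetectingAlloc = HotPathDetectingAlloc::new();
/// ```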
#[cfg(feature = "allocation-tracking")]
pub struct HotPathDetectingAlloc {
    inner: System,
}

#[cfg(feature = "allocation-tracking")]
impl HotPathDetectingAlloc {
    /// Creates a new wrapper around [`System`]; `const` so it can
    /// initialize a `#[global_allocator]` static.
    #[must_use]
    pub const fn new() -> Self {
        Self { inner: System }
    }
}

#[cfg(feature = "allocation-tracking")]
impl Default for HotPathDetectingAlloc {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(feature = "allocation-tracking")]
unsafe impl GlobalAlloc for HotPathDetectingAlloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        check_hot_path_allocation("alloc", layout.size());
        self.inner.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // Freeing memory is always permitted in hot paths; only new
        // allocations are tracked.
        self.inner.dealloc(ptr, layout);
    }

    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        // Only a request for more memory counts as a new allocation;
        // shrinking is always permitted.
        if new_size > layout.size() {
            check_hot_path_allocation("realloc", new_size);
        }
        self.inner.realloc(ptr, layout, new_size)
    }

    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        check_hot_path_allocation("alloc_zeroed", layout.size());
        self.inner.alloc_zeroed(layout)
    }
}

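// The `allocation-tracking` cargo feature used throughout this file would be
// declared in the crate manifest; a minimal sketch (assumed, not taken from
// the actual Cargo.toml):
//
//     [features]
//     allocation-tracking = []
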
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_stats_current_and_reset() {
        AllocationStats::reset();

        let stats = AllocationStats::current();
        assert_eq!(stats.hot_path_allocations, 0);
        assert_eq!(stats.normal_allocations, 0);
    }

    #[test]
    #[cfg(feature = "allocation-tracking")]
    fn test_hot_path_enable_disable() {
        assert!(!is_hot_path_enabled());

        enable_hot_path("test_section");
        assert!(is_hot_path_enabled());

        disable_hot_path();
        assert!(!is_hot_path_enabled());
    }
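
    /// A minimal sketch exercising the non-panicking mode: the flag is
    /// thread-local, so toggling it cannot affect tests on other threads.
    #[test]
    #[cfg(feature = "allocation-tracking")]
    fn test_panic_toggle_roundtrip() {
        set_panic_on_alloc(false);
        enable_hot_path("counting_only");
        assert!(is_hot_path_enabled());
        disable_hot_path();
        assert!(!is_hot_path_enabled());
        // Restore the default so later tests on this thread still panic.
        set_panic_on_alloc(true);
    }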
}