re_memory/allocation_tracker.rs

use std::sync::Arc;

use crate::{Backtrace, BacktraceHash, CountAndSize};

/// A hash of a pointer address, used to identify an allocation
/// without keeping the pointer itself around.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct PtrHash(u64);

impl nohash_hasher::IsEnabled for PtrHash {}

impl PtrHash {
    #[inline]
    pub fn new(ptr: *mut u8) -> Self {
        let hash = ahash::RandomState::with_seeds(1, 2, 3, 4).hash_one(ptr);
        Self(hash)
    }
}

/// A human-readable, formatted callstack.
#[derive(Clone)]
pub struct ReadableBacktrace {
    /// Human-readable, formatted backtrace.
    readable: Arc<str>,
}

impl std::fmt::Debug for ReadableBacktrace {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.readable.fmt(f)
    }
}

impl std::fmt::Display for ReadableBacktrace {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.readable.fmt(f)
    }
}

impl ReadableBacktrace {
    #[allow(clippy::allow_attributes, unused_mut)] // `mut` may be unused in some configurations
    fn new(mut backtrace: Backtrace) -> Self {
        Self {
            readable: backtrace.format(),
        }
    }

    #[inline]
    pub fn as_str(&self) -> &str {
        &self.readable
    }

    #[inline]
    pub fn as_arc_str(&self) -> &Arc<str> {
        &self.readable
    }
}

/// Statistics for the live allocations attributed to one callstack.
#[derive(Clone, Debug)]
pub struct CallstackStatistics {
    /// Human-readable backtrace of the callstack.
    pub readable_backtrace: ReadableBacktrace,

    /// Only roughly one in every `stochastic_rate` allocations is tracked,
    /// so actual usage is approximately `stochastic_rate` times larger
    /// than what `extant` reports.
    pub stochastic_rate: usize,

    /// Count and size of the live ("extant") tracked allocations from this callstack.
    pub extant: CountAndSize,
}

/// Tracks a sample of live allocations, grouped by the callstack that made them.
pub struct AllocationTracker {
    /// Track roughly one in this many allocations. Must be a power of two.
    stochastic_rate: usize,

    /// De-duplicated, formatted backtraces, keyed by backtrace hash.
    readable_backtraces: nohash_hasher::IntMap<BacktraceHash, ReadableBacktrace>,

    /// Maps each tracked live allocation to the callstack that created it.
    live_allocs: nohash_hasher::IntMap<PtrHash, BacktraceHash>,

    /// Current count and size of live tracked allocations, per callstack.
    callstack_stats: nohash_hasher::IntMap<BacktraceHash, CountAndSize>,
}

impl AllocationTracker {
    pub fn with_stochastic_rate(stochastic_rate: usize) -> Self {
        assert!(stochastic_rate != 0);
        assert!(stochastic_rate.is_power_of_two());
        Self {
            stochastic_rate,
            readable_backtraces: Default::default(),
            live_allocs: Default::default(),
            callstack_stats: Default::default(),
        }
    }

    /// Sample roughly one in `stochastic_rate` allocations by checking whether
    /// the low bits of the pointer hash are all zero (the rate is a power of two).
    fn should_sample(&self, ptr: PtrHash) -> bool {
        ptr.0 & (self.stochastic_rate as u64 - 1) == 0
    }

    pub fn on_alloc(&mut self, ptr: PtrHash, size: usize) {
        if !self.should_sample(ptr) {
            return;
        }

        let unresolved_backtrace = Backtrace::new_unresolved();
        let hash = BacktraceHash::new(&unresolved_backtrace);

        self.readable_backtraces
            .entry(hash)
            .or_insert_with(|| ReadableBacktrace::new(unresolved_backtrace));

        self.callstack_stats.entry(hash).or_default().add(size);

        self.live_allocs.insert(ptr, hash);
    }

    pub fn on_dealloc(&mut self, ptr: PtrHash, size: usize) {
        if !self.should_sample(ptr) {
            return;
        }

        if let Some(hash) = self.live_allocs.remove(&ptr)
            && let std::collections::hash_map::Entry::Occupied(mut entry) =
                self.callstack_stats.entry(hash)
        {
            let stats = entry.get_mut();
            stats.sub(size);

            // Clean up callstacks that no longer have any live allocations:
            if stats.size == 0 {
                entry.remove();
            }
        }
    }

    /// Return the `n` callstacks that currently have the most live allocated bytes.
    pub fn top_callstacks(&self, n: usize) -> Vec<CallstackStatistics> {
        let mut vec: Vec<_> = self
            .callstack_stats
            .iter()
            .filter(|(_hash, c)| c.count > 0)
            .filter_map(|(hash, c)| {
                Some(CallstackStatistics {
                    readable_backtrace: self.readable_backtraces.get(hash)?.clone(),
                    stochastic_rate: self.stochastic_rate,
                    extant: *c,
                })
            })
            .collect();

        // Biggest first:
        #[expect(clippy::cast_possible_wrap)]
        vec.sort_by_key(|stats| -(stats.extant.size as i64));

        vec.truncate(n);
        vec.shrink_to_fit();
        vec
    }
}
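
// A minimal usage sketch (not part of the original file): it assumes the
// crate-internal `Backtrace`, `BacktraceHash`, and `CountAndSize` types behave
// as used above, and that `CountAndSize` exposes public `count` and `size`
// fields as read elsewhere in this file. With a stochastic rate of 1, every
// allocation passes `should_sample` (the mask `rate - 1` is zero), so the
// tracker's bookkeeping can be exercised deterministically.
#[cfg(test)]
mod allocation_tracker_usage_sketch {
    use super::*;

    #[test]
    fn track_alloc_and_dealloc() {
        // Rate 1 == sample every allocation.
        let mut tracker = AllocationTracker::with_stochastic_rate(1);

        // Hash a real pointer so we have a stable allocation identity:
        let mut buffer = vec![0_u8; 128];
        let ptr = PtrHash::new(buffer.as_mut_ptr());

        tracker.on_alloc(ptr, 128);
        let top = tracker.top_callstacks(10);
        assert_eq!(top.len(), 1);
        assert_eq!(top[0].extant.size, 128);
        assert_eq!(top[0].stochastic_rate, 1);

        // After the matching dealloc, the callstack has no live bytes left:
        tracker.on_dealloc(ptr, 128);
        assert!(tracker.top_callstacks(10).is_empty());
    }
}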