tetsy_util_mem/malloc_size.rs

// Copyright 2016-2017 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A crate for measuring the heap usage of data structures in a way that
//! integrates with Firefox's memory reporting, particularly the use of
//! mozjemalloc and DMD. In particular, it has the following features.
//! - It isn't bound to a particular heap allocator.
//! - It provides traits for both "shallow" and "deep" measurement, which gives
//!   flexibility in the cases where the traits can't be used.
//! - It allows for measuring blocks even when only an interior pointer can be
//!   obtained for heap allocations, e.g. `HashSet` and `HashMap`. (This relies
//!   on the heap allocator having suitable support, which mozjemalloc has.)
//! - It allows handling of types like `Rc` and `Arc` by providing traits that
//!   are different to the ones for non-graph structures.
//!
//! Suggested uses are as follows.
//! - When possible, use the `MallocSizeOf` trait. (Deriving support is
//!   provided by the `malloc_size_of_derive` crate.)
//! - If you need an additional synchronization argument, provide a function
//!   that is like the standard trait method, but with the extra argument.
//! - If you need multiple measurements for a type, provide a function named
//!   `add_size_of` that takes a mutable reference to a struct that contains
//!   the multiple measurement fields.
//! - When deep measurement (via `MallocSizeOf`) cannot be implemented for a
//!   type, shallow measurement (via `MallocShallowSizeOf`) in combination with
//!   iteration can be a useful substitute.
//! - `Rc` and `Arc` are always tricky, which is why `MallocSizeOf` is not (and
//!   should not be) implemented for them.
//! - If an `Rc` or `Arc` is known to be a "primary" reference and can always
//!   be measured, it should be measured via the `MallocUnconditionalSizeOf`
//!   trait.
//! - If an `Rc` or `Arc` should be measured only if it hasn't been seen
//!   before, it should be measured via the `MallocConditionalSizeOf` trait.
//! - Using universal function call syntax is a good idea when measuring boxed
//!   fields in structs, because it makes it clear that the Box is being
//!   measured as well as the thing it points to. E.g.
//!   `<Box<_> as MallocSizeOf>::size_of(field, ops)`.

//! This is an extended version of the Servo internal malloc_size crate.
//! We should occasionally track upstream changes and fixes and reintroduce them here whenever applicable.
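//!
//! As an illustrative sketch only (the `Entry` struct and its fields are
//! hypothetical), a hand-written `MallocSizeOf` implementation typically sums
//! the deep sizes of its fields and does not count the space of the value
//! itself:
//!
//! ```ignore
//! use tetsy_util_mem::{MallocSizeOf, MallocSizeOfOps};
//!
//! struct Entry {
//! 	name: String,
//! 	payload: Vec<u8>,
//! }
//!
//! impl MallocSizeOf for Entry {
//! 	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
//! 		// Each field reports its own heap usage; the `Entry` value itself
//! 		// may live on the stack or inline in a parent allocation.
//! 		self.name.size_of(ops) + self.payload.size_of(ops)
//! 	}
//! }
//! ```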

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(feature = "std")]
mod rstd {
	pub use std::*;
}
#[cfg(not(feature = "std"))]
mod rstd {
	pub use core::*;
	pub mod collections {
		pub use alloc::collections::*;
		pub use vec_deque::VecDeque;
	}
}

#[cfg(feature = "std")]
use std::sync::Arc;

#[cfg(not(feature = "std"))]
pub use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use core::ffi::c_void;
#[cfg(feature = "std")]
use rstd::hash::Hash;
use rstd::marker::PhantomData;
use rstd::mem::size_of;
use rstd::ops::Range;
use rstd::ops::{Deref, DerefMut};
#[cfg(feature = "std")]
use std::hash::BuildHasher;
#[cfg(feature = "std")]
use std::os::raw::c_void;

/// A C function that takes a pointer to a heap allocation and returns its size.
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// A closure implementing a stateful predicate on pointers.
pub type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;

/// Operations used when measuring heap usage of data structures.
pub struct MallocSizeOfOps {
	/// A function that returns the size of a heap allocation.
	size_of_op: VoidPtrToSizeFn,

	/// Like `size_of_op`, but can take an interior pointer. Optional because
	/// not all allocators support this operation. If it's not provided, some
	/// memory measurements will actually be computed estimates rather than
	/// real and accurate measurements.
	enclosing_size_of_op: Option<VoidPtrToSizeFn>,

	/// Check if a pointer has been seen before, and remember it for next time.
	/// Useful when measuring `Rc`s and `Arc`s. Optional, because many places
	/// don't need it.
	have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}

impl MallocSizeOfOps {
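	/// Create a new set of measurement operations from an allocator's
	/// `malloc_usable_size`-style callback, optionally together with an
	/// enclosing-size callback and a "seen pointer" predicate.
	///
	/// A minimal sketch, assuming `usable_size` stands in for whatever
	/// callback the chosen allocator provides (the name is a placeholder):
	///
	/// ```ignore
	/// use std::collections::HashSet;
	/// use std::os::raw::c_void;
	///
	/// unsafe extern "C" fn usable_size(_ptr: *const c_void) -> usize {
	/// 	// e.g. delegate to the allocator's `malloc_usable_size`.
	/// 	0
	/// }
	///
	/// // Track pointers that have already been measured, for `Rc`/`Arc`.
	/// let mut seen = HashSet::new();
	/// let have_seen = move |ptr: *const c_void| !seen.insert(ptr as usize);
	///
	/// let mut ops = MallocSizeOfOps::new(usable_size, None, Some(Box::new(have_seen)));
	/// ```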
	pub fn new(
		size_of: VoidPtrToSizeFn,
		malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
		have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
	) -> Self {
		MallocSizeOfOps {
			size_of_op: size_of,
			enclosing_size_of_op: malloc_enclosing_size_of,
			have_seen_ptr_op: have_seen_ptr,
		}
	}

	/// Check if an allocation is empty. This relies on knowledge of how Rust
	/// handles empty allocations, which may change in the future.
	fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
		// The correct condition is this:
		//   `ptr as usize <= ::std::mem::align_of::<T>()`
		// But we can't call align_of() on a ?Sized T. So we approximate it
		// with the following. 256 is large enough that it should always be
		// larger than the required alignment, but small enough that it is
		// always in the first page of memory and therefore not a legitimate
		// address.
		return ptr as *const usize as usize <= 256;
	}

	/// Call `size_of_op` on `ptr`, first checking that the allocation isn't
	/// empty, because some types (such as `Vec`) utilize empty allocations.
	pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
		if MallocSizeOfOps::is_empty(ptr) {
			0
		} else {
			(self.size_of_op)(ptr as *const c_void)
		}
	}

	/// Is an `enclosing_size_of_op` available?
	pub fn has_malloc_enclosing_size_of(&self) -> bool {
		self.enclosing_size_of_op.is_some()
	}

	/// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
	/// must not be empty.
	pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
		assert!(!MallocSizeOfOps::is_empty(ptr));
		(self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
	}

	/// Call `have_seen_ptr_op` on `ptr`.
	pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
		let have_seen_ptr_op = self.have_seen_ptr_op.as_mut().expect("missing have_seen_ptr_op");
		have_seen_ptr_op(ptr as *const c_void)
	}
}

/// Trait for measuring the "deep" heap usage of a data structure. This is the
/// most commonly-used of the traits.
pub trait MallocSizeOf {
	/// Measure the heap usage of all descendant heap-allocated structures, but
	/// not the space taken up by the value itself.
	/// If `T::size_of` is a constant, consider implementing `constant_size` as well.
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;

	/// Used to optimize `MallocSizeOf` implementations for collections
	/// like `Vec` and `HashMap`, to avoid iterating over them unnecessarily.
	/// The `Self: Sized` bound is for object safety.
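	///
	/// As an illustrative sketch, a type whose deep size never varies can
	/// report it here so that containers of it can be measured as
	/// `len() * constant` without iterating:
	///
	/// ```ignore
	/// fn constant_size() -> Option<usize>
	/// where
	/// 	Self: Sized,
	/// {
	/// 	Some(0) // e.g. a plain value type that owns no heap allocations
	/// }
	/// ```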
	fn constant_size() -> Option<usize>
	where
		Self: Sized,
	{
		None
	}
}

/// Trait for measuring the "shallow" heap usage of a container.
pub trait MallocShallowSizeOf {
	/// Measure the heap usage of immediate heap-allocated descendant
	/// structures, but not the space taken up by the value itself. Anything
	/// beyond the immediate descendants must be measured separately, using
	/// iteration.
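	///
	/// A sketch of the combination described above, for a container whose
	/// deep size cannot be derived (`container` and its element type are
	/// hypothetical):
	///
	/// ```ignore
	/// let mut n = container.shallow_size_of(ops);
	/// for elem in container.iter() {
	/// 	n += elem.size_of(ops);
	/// }
	/// ```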
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Like `MallocSizeOf`, but with a different name so it cannot be used
/// accidentally with derive(MallocSizeOf). For use with types like `Rc` and
/// `Arc` when appropriate (e.g. when measuring a "primary" reference).
pub trait MallocUnconditionalSizeOf {
	/// Measure the heap usage of all heap-allocated descendant structures, but
	/// not the space taken up by the value itself.
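	///
	/// A sketch of measuring a "primary" `Arc` field from a containing
	/// struct's own `size_of` (the field names are hypothetical):
	///
	/// ```ignore
	/// fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
	/// 	self.primary.unconditional_size_of(ops) + self.rest.size_of(ops)
	/// }
	/// ```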
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocUnconditionalShallowSizeOf {
	/// `unconditional_size_of` combined with `shallow_size_of`.
	fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Like `MallocSizeOf`, but only measures if the value hasn't already been
/// measured. For use with types like `Rc` and `Arc` when appropriate (e.g.
/// when there is no "primary" reference).
pub trait MallocConditionalSizeOf {
	/// Measure the heap usage of all heap-allocated descendant structures, but
	/// not the space taken up by the value itself, and only if that heap usage
	/// hasn't already been measured.
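	///
	/// A sketch of a typical implementation, mirroring the `Arc` impl in this
	/// module (`ptr_to_contents` stands in for a pointer to the shared
	/// contents): skip the measurement when the value has been seen before.
	///
	/// ```ignore
	/// fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
	/// 	if ops.have_seen_ptr(ptr_to_contents) {
	/// 		0
	/// 	} else {
	/// 		self.unconditional_size_of(ops)
	/// 	}
	/// }
	/// ```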
	fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocConditionalShallowSizeOf {
	/// `conditional_size_of` combined with `shallow_size_of`.
	fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

#[cfg(not(any(all(target_os = "macos", not(feature = "jemalloc-global"),), feature = "estimate-heapsize")))]
pub mod inner_allocator_use {

	use super::*;

	#[cfg(not(feature = "std"))]
	use alloc::string::String;

	impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
		fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(&**self) }
		}
	}

	impl<T> MallocShallowSizeOf for Vec<T> {
		fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(self.as_ptr()) }
		}
	}

	// Currently this only seems to work reliably with jemalloc.
	#[cfg(feature = "std")]
	#[cfg(all(feature = "jemalloc-global", not(target_os = "windows")))]
	impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
		fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(arc_ptr(self)) }
		}
	}

	#[cfg(feature = "std")]
	#[cfg(not(all(feature = "jemalloc-global", not(target_os = "windows"))))]
	impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
		fn unconditional_shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
			size_of::<T>()
		}
	}

	impl MallocSizeOf for String {
		fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(self.as_ptr()) }
		}
	}
}

impl<'a, T: ?Sized> MallocSizeOf for &'a T {
	fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
		// Zero makes sense for a non-owning reference.
		0
	}
	fn constant_size() -> Option<usize> {
		Some(0)
	}
}

impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.shallow_size_of(ops) + (**self).size_of(ops)
	}
}

#[impl_trait_for_tuples::impl_for_tuples(12)]
impl MallocSizeOf for Tuple {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut result = 0;
		for_tuples!( #( result += Tuple.size_of(ops); )* );
		result
	}
	fn constant_size() -> Option<usize> {
		let mut result = Some(0);
		for_tuples!( #( result = result.and_then(|s| Tuple::constant_size().map(|t| s + t)); )* );
		result
	}
}

impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if let Some(val) = self.as_ref() {
			val.size_of(ops)
		} else {
			0
		}
	}
	fn constant_size() -> Option<usize> {
		T::constant_size().filter(|s| *s == 0)
	}
}

impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			Ok(ref x) => x.size_of(ops),
			Err(ref e) => e.size_of(ops),
		}
	}
	fn constant_size() -> Option<usize> {
		// Result<T, E> has constant size iff T::constant_size == E::constant_size
		T::constant_size().and_then(|t| E::constant_size().filter(|e| *e == t))
	}
}

impl<T: MallocSizeOf + Copy> MallocSizeOf for rstd::cell::Cell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.get().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::cell::RefCell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.borrow().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}

#[cfg(feature = "std")]
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
	B::Owned: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			std::borrow::Cow::Borrowed(_) => 0,
			std::borrow::Cow::Owned(ref b) => b.size_of(ops),
		}
	}
}

impl<T: MallocSizeOf> MallocSizeOf for [T] {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = 0;
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<T> MallocShallowSizeOf for rstd::collections::VecDeque<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			if let Some(front) = self.front() {
				// The front element is an interior pointer.
				unsafe { ops.malloc_enclosing_size_of(&*front) }
			} else {
				// This assumes that no memory is allocated when the VecDeque is empty.
				0
			}
		} else {
			// An estimate.
			self.capacity() * size_of::<T>()
		}
	}
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::collections::VecDeque<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

#[cfg(feature = "std")]
impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash,
	S: BuildHasher,
{
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			// The first value from the iterator gives us an interior pointer.
			// `ops.malloc_enclosing_size_of()` then gives us the storage size.
			// This assumes that the `HashSet`'s contents (values and hashes)
			// are all stored in a single contiguous heap allocation.
			self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			// An estimate.
			self.capacity() * (size_of::<T>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "std")]
impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash + MallocSizeOf,
	S: BuildHasher,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<I: MallocSizeOf> MallocSizeOf for rstd::cmp::Reverse<I> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.0.size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		I::constant_size()
	}
}

#[cfg(feature = "std")]
impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		// See the implementation for std::collections::HashSet for details.
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "std")]
impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

impl<K, V> MallocShallowSizeOf for rstd::collections::BTreeMap<K, V> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

impl<K, V> MallocSizeOf for rstd::collections::BTreeMap<K, V>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

impl<T> MallocShallowSizeOf for rstd::collections::BTreeSet<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			// See the HashSet implementation for how this works.
			self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			// An estimate.
			self.len() * (size_of::<T>() + size_of::<usize>())
		}
	}
}

impl<T> MallocSizeOf for rstd::collections::BTreeSet<T>
where
	T: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

// XXX: we don't want MallocSizeOf to be defined for Rc and Arc. If negative
// trait bounds are ever allowed, this code should be uncommented.
// (We do have a compile-fail test for this:
// rc_arc_must_not_derive_malloc_size_of.rs)
//impl<T> !MallocSizeOf for Arc<T> { }
//impl<T> !MallocShallowSizeOf for Arc<T> { }

#[cfg(feature = "std")]
fn arc_ptr<T>(s: &Arc<T>) -> *const T {
	&(**s) as *const T
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for Arc<T> {
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T> MallocConditionalShallowSizeOf for Arc<T> {
	fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.have_seen_ptr(arc_ptr(self)) {
			0
		} else {
			self.unconditional_shallow_size_of(ops)
		}
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocConditionalSizeOf for Arc<T> {
	fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.have_seen_ptr(arc_ptr(self)) {
			0
		} else {
			self.unconditional_size_of(ops)
		}
	}
}

/// If a mutex is stored directly as a member of a data type that is being measured,
/// it is the unique owner of its contents and deserves to be measured.
///
/// If a mutex is stored inside of an Arc value as a member of a data type that is being measured,
/// the Arc will not be automatically measured so there is no risk of overcounting the mutex's
/// contents.
///
/// The same reasoning applies to RwLock.
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().unwrap().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().unwrap().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().size_of(ops)
	}
}

/// Implement a notion of zero allocation size for some type(s).
///
/// If used with generics, by default the generic arguments are required to
/// implement `MallocSizeOf`. This can be avoided by passing "any: " in front
/// of the type list.
///
/// ```rust
/// use tetsy_util_mem::{malloc_size, malloc_size_of_is_0};
///
/// struct Data<P> {
/// 	phantom: std::marker::PhantomData<P>,
/// }
///
/// malloc_size_of_is_0!(any: Data<P>);
///
/// // MallocSizeOf is NOT implemented for [u8; 333]
/// assert_eq!(malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
/// ```
///
/// And without the "any: " prefix:
///
/// ```rust
/// use tetsy_util_mem::{malloc_size, malloc_size_of_is_0};
///
/// struct Data<T>(pub T);
///
/// // generic argument (`T`) must be `impl MallocSizeOf`
/// malloc_size_of_is_0!(Data<u8>);
///
/// assert_eq!(malloc_size(&Data(0u8)), 0);
/// ```
#[macro_export]
macro_rules! malloc_size_of_is_0(
	($($ty:ty),+) => (
		$(
			impl $crate::MallocSizeOf for $ty {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	(any: $($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	($($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
);

malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

malloc_size_of_is_0!(rstd::sync::atomic::AtomicBool);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicUsize);

malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(any: PhantomData<T>);

/// A measurable wrapper that defers to the inner value; used to verify the
/// `MallocSizeOf` implementation of a struct.
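///
/// A sketch of wrapping a field (the `Cache` struct below is hypothetical);
/// `Deref`/`DerefMut` keep the wrapped value usable as before:
///
/// ```ignore
/// struct Cache {
/// 	entries: Measurable<Vec<u8>>,
/// }
/// ```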
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);

impl<T: MallocSizeOf> Deref for Measurable<T> {
	type Target = T;

	fn deref(&self) -> &T {
		&self.0
	}
}

impl<T: MallocSizeOf> DerefMut for Measurable<T> {
	fn deref_mut(&mut self) -> &mut T {
		&mut self.0
	}
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocShallowSizeOf for hashbrown::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		// See the implementation for std::collections::HashSet for details.
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocSizeOf for hashbrown::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

#[cfg(feature = "lru")]
impl<K, V, S> MallocSizeOf for lru::LruCache<K, V, S>
where
	K: MallocSizeOf + rstd::cmp::Eq + rstd::hash::Hash,
	V: MallocSizeOf,
	S: rstd::hash::BuildHasher,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = 0;
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

malloc_size_of_is_0!(
	[u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 9], [u8; 10], [u8; 11], [u8; 12],
	[u8; 13], [u8; 14], [u8; 15], [u8; 16], [u8; 17], [u8; 18], [u8; 19], [u8; 20], [u8; 21], [u8; 22], [u8; 23],
	[u8; 24], [u8; 25], [u8; 26], [u8; 27], [u8; 28], [u8; 29], [u8; 30], [u8; 31], [u8; 32]
);

macro_rules! impl_smallvec {
	($size: expr) => {
		#[cfg(feature = "smallvec")]
		impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
		where
			T: MallocSizeOf,
		{
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				let mut n = if self.spilled() { self.capacity() * core::mem::size_of::<T>() } else { 0 };
				if let Some(t) = T::constant_size() {
					n += self.len() * t;
				} else {
					n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
				}
				n
			}
		}
	};
}

impl_smallvec!(32); // tetsy-kvdb uses this
impl_smallvec!(36); // trie-db uses this

#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Instant);
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Duration);

#[cfg(all(test, feature = "std"))] // tests are using std implementations
mod tests {
	use crate::{allocators::new_malloc_size_ops, MallocSizeOf, MallocSizeOfOps};
	use smallvec::SmallVec;
	use std::collections::BTreeSet;
	use std::mem;
	impl_smallvec!(3);

	#[test]
	fn test_smallvec_stack_allocated_type() {
		let mut v: SmallVec<[u8; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(1);
		v.push(2);
		v.push(3);
		assert_eq!(v.size_of(&mut ops), 0);
		assert!(!v.spilled());
		v.push(4);
		assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
		assert_eq!(v.size_of(&mut ops), 4); // 4 u8s on the heap
	}

	#[test]
	fn test_smallvec_boxed_stack_allocated_type() {
		let mut v: SmallVec<[Box<u8>; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(Box::new(1u8));
		v.push(Box::new(2u8));
		v.push(Box::new(3u8));
		assert!(v.size_of(&mut ops) >= 3);
		assert!(!v.spilled());
		v.push(Box::new(4u8));
		assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs = mem::size_of::<Box<u8>>() * 4 + 4;
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}

	#[test]
	fn test_smallvec_heap_allocated_type() {
		let mut v: SmallVec<[String; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push("COW".into());
		v.push("PIG".into());
		v.push("DUCK".into());
		assert!(!v.spilled());
		assert!(v.size_of(&mut ops) >= "COW".len() + "PIG".len() + "DUCK".len());
		v.push("ÖWL".into());
		assert!(v.spilled());
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs = mem::size_of::<String>() * 4 + "ÖWL".len() + "COW".len() + "PIG".len() + "DUCK".len();
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}

	#[test]
	fn test_large_vec() {
		const N: usize = 128 * 1024 * 1024;
		let val = vec![1u8; N];
		let mut ops = new_malloc_size_ops();
		assert!(val.size_of(&mut ops) >= N);
		assert!(val.size_of(&mut ops) < 2 * N);
	}

	#[test]
	fn btree_set() {
		let mut set = BTreeSet::new();
		for t in 0..100 {
			set.insert(vec![t]);
		}
		// ~36 per value
		assert!(crate::malloc_size(&set) > 3000);
	}

	#[test]
	fn special_malloc_size_of_0() {
		struct Data<P> {
			phantom: std::marker::PhantomData<P>,
		}

		malloc_size_of_is_0!(any: Data<P>);

		// MallocSizeOf is not implemented for [u8; 333]
		assert_eq!(crate::malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
	}

	#[test]
	fn constant_size() {
		struct AlwaysTwo(Vec<u8>);

		impl MallocSizeOf for AlwaysTwo {
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				self.0.size_of(ops)
			}
			fn constant_size() -> Option<usize> {
				Some(2)
			}
		}

		assert_eq!(AlwaysTwo::constant_size(), Some(2));
		assert_eq!(std::cmp::Reverse::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::RefCell::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::Cell::<u8>::constant_size(), Some(0));
		assert_eq!(Result::<(), ()>::constant_size(), Some(0));
		assert_eq!(<(AlwaysTwo, (), [u8; 32], AlwaysTwo)>::constant_size(), Some(2 + 2));
		assert_eq!(Option::<u8>::constant_size(), Some(0));
		assert_eq!(<&String>::constant_size(), Some(0));

		assert_eq!(<String>::constant_size(), None);
		assert_eq!(std::borrow::Cow::<String>::constant_size(), None);
		assert_eq!(Result::<(), String>::constant_size(), None);
		assert_eq!(Option::<AlwaysTwo>::constant_size(), None);
	}
}