#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(feature = "std")]
mod rstd {
    pub use std::*;
}
#[cfg(not(feature = "std"))]
mod rstd {
    pub use core::*;
    pub mod collections {
        pub use alloc::collections::*;
        pub use vec_deque::VecDeque;
    }
}

#[cfg(feature = "std")]
use std::sync::Arc;

#[cfg(not(feature = "std"))]
pub use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use core::ffi::c_void;
#[cfg(feature = "std")]
use rstd::hash::Hash;
use rstd::marker::PhantomData;
use rstd::mem::size_of;
use rstd::ops::Range;
use rstd::ops::{Deref, DerefMut};
#[cfg(feature = "std")]
use std::hash::BuildHasher;
#[cfg(feature = "std")]
use std::os::raw::c_void;

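/// A C-style function that, given a pointer to the start of a heap allocation,
/// returns the allocation's size in bytes.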
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

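/// A closure that reports whether a pointer has already been seen, so that shared
/// allocations (e.g. behind `Arc`) are not counted twice.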
pub type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;

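/// Operations used when measuring the heap usage of data structures.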
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation, given a pointer to its start.
    size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer. Optional, because it is not
    /// supported by all allocators.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Checks if a pointer has been seen before. Used to avoid double-counting shared
    /// allocations such as those behind `Arc`. Optional.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}

impl MallocSizeOfOps {
    pub fn new(
        size_of: VoidPtrToSizeFn,
        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
        have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
    ) -> Self {
        MallocSizeOfOps {
            size_of_op: size_of,
            enclosing_size_of_op: malloc_enclosing_size_of,
            have_seen_ptr_op: have_seen_ptr,
        }
    }

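    /// Check whether `ptr` points at an "empty" allocation. Types such as `Vec` use small
    /// sentinel values rather than real heap pointers for empty collections, so anything at
    /// or below address 256 is treated as not heap-allocated.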
    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
        ptr as *const usize as usize <= 256
    }

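    /// Call `size_of_op` on `ptr`, first checking that the allocation is non-empty,
    /// because some types (such as `Vec`) use empty allocations internally.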
    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
        if MallocSizeOfOps::is_empty(ptr) {
            0
        } else {
            (self.size_of_op)(ptr as *const c_void)
        }
    }

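    /// Is an `enclosing_size_of_op` available?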
    pub fn has_malloc_enclosing_size_of(&self) -> bool {
        self.enclosing_size_of_op.is_some()
    }

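    /// Call `enclosing_size_of_op`, which must be available, on `ptr`, which must not be empty.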
    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
        assert!(!MallocSizeOfOps::is_empty(ptr));
        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
    }

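    /// Call `have_seen_ptr_op` on `ptr`; panics if no such operation was provided.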
    pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
        let have_seen_ptr_op = self.have_seen_ptr_op.as_mut().expect("missing have_seen_ptr_op");
        have_seen_ptr_op(ptr as *const c_void)
    }
}

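/// Trait for measuring the "deep" heap usage of a value: the space taken by everything it
/// owns on the heap, excluding the space taken up by the value itself.
///
/// A minimal illustrative implementation for a hypothetical wrapper type might look like
/// the following (names are examples only):
///
/// ```ignore
/// struct Buffers {
///     data: Vec<u8>,
/// }
///
/// impl MallocSizeOf for Buffers {
///     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
///         // Delegate to the field's own implementation.
///         self.data.size_of(ops)
///     }
/// }
/// ```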
pub trait MallocSizeOf {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;

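    /// If the size of this type is known at compile time and does not depend on the value
    /// (e.g. it owns no heap data), return `Some(size)`. Collection implementations use this
    /// to measure constant-sized elements as `len() * size` instead of iterating.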
    fn constant_size() -> Option<usize>
    where
        Self: Sized,
    {
        None
    }
}

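/// Trait for measuring the "shallow" heap usage of a container: the space used by the
/// container's own heap allocation, but not the contents it points to.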
pub trait MallocShallowSizeOf {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

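/// Like `MallocSizeOf`, but for reference-counted types such as `Arc`: the referent is
/// always measured, even if it is shared with other owners.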
pub trait MallocUnconditionalSizeOf {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

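/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.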
pub trait MallocUnconditionalShallowSizeOf {
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

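/// Like `MallocSizeOf`, but only measures the referent if it has not been seen before
/// (as tracked via `MallocSizeOfOps::have_seen_ptr`), so shared data is counted once.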
pub trait MallocConditionalSizeOf {
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

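/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`.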
pub trait MallocConditionalShallowSizeOf {
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

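// Implementations that query the allocator for the real size of an allocation. Only
// compiled when the active allocator backend supports it (i.e. not when sizes are
// estimated via the `estimate-heapsize` feature, and not on macOS without jemalloc).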
#[cfg(not(any(all(target_os = "macos", not(feature = "jemalloc-global"),), feature = "estimate-heapsize")))]
pub mod inner_allocator_use {

    use super::*;

    #[cfg(not(feature = "std"))]
    use alloc::string::String;

    impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
        fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(&**self) }
        }
    }

    impl<T> MallocShallowSizeOf for Vec<T> {
        fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        }
    }

    #[cfg(feature = "std")]
    #[cfg(all(feature = "jemalloc-global", not(target_os = "windows")))]
    impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
        fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(arc_ptr(self)) }
        }
    }

    #[cfg(feature = "std")]
    #[cfg(not(all(feature = "jemalloc-global", not(target_os = "windows"))))]
    impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
        fn unconditional_shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
            size_of::<T>()
        }
    }

    impl MallocSizeOf for String {
        fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        }
    }
}

impl<'a, T: ?Sized> MallocSizeOf for &'a T {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
    fn constant_size() -> Option<usize> {
        Some(0)
    }
}

impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.shallow_size_of(ops) + (**self).size_of(ops)
    }
}

#[impl_trait_for_tuples::impl_for_tuples(12)]
impl MallocSizeOf for Tuple {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut result = 0;
        for_tuples!( #( result += Tuple.size_of(ops); )* );
        result
    }
    fn constant_size() -> Option<usize> {
        let mut result = Some(0);
        for_tuples!( #( result = result.and_then(|s| Tuple::constant_size().map(|t| s + t)); )* );
        result
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if let Some(val) = self.as_ref() {
            val.size_of(ops)
        } else {
            0
        }
    }
    fn constant_size() -> Option<usize> {
        T::constant_size().filter(|s| *s == 0)
    }
}

impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            Ok(ref x) => x.size_of(ops),
            Err(ref e) => e.size_of(ops),
        }
    }
    fn constant_size() -> Option<usize> {
        T::constant_size().and_then(|t| E::constant_size().filter(|e| *e == t))
    }
}

impl<T: MallocSizeOf + Copy> MallocSizeOf for rstd::cell::Cell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.get().size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        T::constant_size()
    }
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::cell::RefCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.borrow().size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        T::constant_size()
    }
}

#[cfg(feature = "std")]
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
    B::Owned: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            std::borrow::Cow::Borrowed(_) => 0,
            std::borrow::Cow::Owned(ref b) => b.size_of(ops),
        }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for [T] {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<T> MallocShallowSizeOf for rstd::collections::VecDeque<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            if let Some(front) = self.front() {
                unsafe { ops.malloc_enclosing_size_of(&*front) }
            } else {
                0
            }
        } else {
            self.capacity() * size_of::<T>()
        }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::collections::VecDeque<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

#[cfg(feature = "std")]
impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where
    T: Eq + Hash,
    S: BuildHasher,
{
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
        } else {
            self.capacity() * (size_of::<T>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "std")]
impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where
    T: Eq + Hash + MallocSizeOf,
    S: BuildHasher,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<I: MallocSizeOf> MallocSizeOf for rstd::cmp::Reverse<I> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        I::constant_size()
    }
}

#[cfg(feature = "std")]
impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "std")]
impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

impl<K, V> MallocShallowSizeOf for rstd::collections::BTreeMap<K, V> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

impl<K, V> MallocSizeOf for rstd::collections::BTreeMap<K, V>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

impl<T> MallocShallowSizeOf for rstd::collections::BTreeSet<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
        } else {
            self.len() * (size_of::<T>() + size_of::<usize>())
        }
    }
}

impl<T> MallocSizeOf for rstd::collections::BTreeSet<T>
where
    T: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

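/// Get a pointer to the data an `Arc` points at, suitable for passing to the
/// `MallocSizeOfOps` measurement functions.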
#[cfg(feature = "std")]
fn arc_ptr<T>(s: &Arc<T>) -> *const T {
    &(**s) as *const T
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for Arc<T> {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T> MallocConditionalShallowSizeOf for Arc<T> {
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.have_seen_ptr(arc_ptr(self)) {
            0
        } else {
            self.unconditional_shallow_size_of(ops)
        }
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocConditionalSizeOf for Arc<T> {
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.have_seen_ptr(arc_ptr(self)) {
            0
        } else {
            self.unconditional_size_of(ops)
        }
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.lock().unwrap().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.lock().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.read().unwrap().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.read().size_of(ops)
    }
}

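/// Implement `MallocSizeOf` with a size of zero for types that never own heap allocations
/// (primitives, `PhantomData`, and the like). See the invocations below for usage.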
#[macro_export]
macro_rules! malloc_size_of_is_0(
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
                #[inline(always)]
                fn constant_size() -> Option<usize> { Some(0) }
            }
        )+
    );
    (any: $($ty:ident<$($gen:ident),+>),+) => (
        $(
            impl<$($gen),+> $crate::MallocSizeOf for $ty<$($gen),+> {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
                #[inline(always)]
                fn constant_size() -> Option<usize> { Some(0) }
            }
        )+
    );
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
            impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
                #[inline(always)]
                fn constant_size() -> Option<usize> { Some(0) }
            }
        )+
    );
);

malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

malloc_size_of_is_0!(rstd::sync::atomic::AtomicBool);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicUsize);

malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(any: PhantomData<T>);

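/// Newtype wrapper around a measurable value; dereferences to the wrapped value.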
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);

impl<T: MallocSizeOf> Deref for Measurable<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T: MallocSizeOf> DerefMut for Measurable<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocShallowSizeOf for hashbrown::HashMap<K, V, S> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocSizeOf for hashbrown::HashMap<K, V, S>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

#[cfg(feature = "lru")]
impl<K, V, S> MallocSizeOf for lru::LruCache<K, V, S>
where
    K: MallocSizeOf + rstd::cmp::Eq + rstd::hash::Hash,
    V: MallocSizeOf,
    S: rstd::hash::BuildHasher,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

malloc_size_of_is_0!(
    [u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 9], [u8; 10], [u8; 11], [u8; 12],
    [u8; 13], [u8; 14], [u8; 15], [u8; 16], [u8; 17], [u8; 18], [u8; 19], [u8; 20], [u8; 21], [u8; 22], [u8; 23],
    [u8; 24], [u8; 25], [u8; 26], [u8; 27], [u8; 28], [u8; 29], [u8; 30], [u8; 31], [u8; 32]
);

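// Implements `MallocSizeOf` for `SmallVec` with a specific inline capacity: only spilled
// (heap-allocated) storage is counted, plus the deep size of elements when it is not constant.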
macro_rules! impl_smallvec {
    ($size: expr) => {
        #[cfg(feature = "smallvec")]
        impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
        where
            T: MallocSizeOf,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                let mut n = if self.spilled() { self.capacity() * core::mem::size_of::<T>() } else { 0 };
                if let Some(t) = T::constant_size() {
                    n += self.len() * t;
                } else {
                    n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
                }
                n
            }
        }
    };
}

impl_smallvec!(32);
impl_smallvec!(36);

#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Instant);
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Duration);

#[cfg(all(test, feature = "std"))]
mod tests {
    use crate::{allocators::new_malloc_size_ops, MallocSizeOf, MallocSizeOfOps};
    use smallvec::SmallVec;
    use std::collections::BTreeSet;
    use std::mem;
    impl_smallvec!(3);

    #[test]
    fn test_smallvec_stack_allocated_type() {
        let mut v: SmallVec<[u8; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(v.size_of(&mut ops), 0);
        assert!(!v.spilled());
        v.push(4);
        assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
        assert_eq!(v.size_of(&mut ops), 4);
    }

    #[test]
    fn test_smallvec_boxed_stack_allocated_type() {
        let mut v: SmallVec<[Box<u8>; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push(Box::new(1u8));
        v.push(Box::new(2u8));
        v.push(Box::new(3u8));
        assert!(v.size_of(&mut ops) >= 3);
        assert!(!v.spilled());
        v.push(Box::new(4u8));
        assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
        let mut ops = new_malloc_size_ops();
        let expected_min_allocs = mem::size_of::<Box<u8>>() * 4 + 4;
        assert!(v.size_of(&mut ops) >= expected_min_allocs);
    }

    #[test]
    fn test_smallvec_heap_allocated_type() {
        let mut v: SmallVec<[String; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push("COW".into());
        v.push("PIG".into());
        v.push("DUCK".into());
        assert!(!v.spilled());
        assert!(v.size_of(&mut ops) >= "COW".len() + "PIG".len() + "DUCK".len());
        v.push("ÖWL".into());
        assert!(v.spilled());
        let mut ops = new_malloc_size_ops();
        let expected_min_allocs = mem::size_of::<String>() * 4 + "ÖWL".len() + "COW".len() + "PIG".len() + "DUCK".len();
        assert!(v.size_of(&mut ops) >= expected_min_allocs);
    }

    #[test]
    fn test_large_vec() {
        const N: usize = 128 * 1024 * 1024;
        let val = vec![1u8; N];
        let mut ops = new_malloc_size_ops();
        assert!(val.size_of(&mut ops) >= N);
        assert!(val.size_of(&mut ops) < 2 * N);
    }

    #[test]
    fn btree_set() {
        let mut set = BTreeSet::new();
        for t in 0..100 {
            set.insert(vec![t]);
        }
        assert!(crate::malloc_size(&set) > 3000);
    }

    #[test]
    fn special_malloc_size_of_0() {
        struct Data<P> {
            phantom: std::marker::PhantomData<P>,
        }

        malloc_size_of_is_0!(any: Data<P>);

        assert_eq!(crate::malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
    }

    #[test]
    fn constant_size() {
        struct AlwaysTwo(Vec<u8>);

        impl MallocSizeOf for AlwaysTwo {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                self.0.size_of(ops)
            }
            fn constant_size() -> Option<usize> {
                Some(2)
            }
        }

        assert_eq!(AlwaysTwo::constant_size(), Some(2));
        assert_eq!(std::cmp::Reverse::<u8>::constant_size(), Some(0));
        assert_eq!(std::cell::RefCell::<u8>::constant_size(), Some(0));
        assert_eq!(std::cell::Cell::<u8>::constant_size(), Some(0));
        assert_eq!(Result::<(), ()>::constant_size(), Some(0));
        assert_eq!(<(AlwaysTwo, (), [u8; 32], AlwaysTwo)>::constant_size(), Some(2 + 2));
        assert_eq!(Option::<u8>::constant_size(), Some(0));
        assert_eq!(<&String>::constant_size(), Some(0));

        assert_eq!(<String>::constant_size(), None);
        assert_eq!(std::borrow::Cow::<String>::constant_size(), None);
        assert_eq!(Result::<(), String>::constant_size(), None);
        assert_eq!(Option::<AlwaysTwo>::constant_size(), None);
    }
}
908}