size_of/pointers.rs

use crate::{Context, SizeOf};
use alloc::{
    boxed::Box,
    rc::{Rc, Weak as RcWeak},
    sync::{Arc, Weak as ArcWeak},
};
use core::{mem::size_of_val, ptr::NonNull, sync::atomic::AtomicPtr};

// TODO: Do we want to traverse all *accessible* memory or all *owned* memory?
impl<T> SizeOf for &T
where
    T: SizeOf + ?Sized,
{
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {
        // // Only record the data behind the ref if we've never seen it before
        // if context.insert_ref(self) {
        //     context.add(size_of_val(*self));
        //     T::total_size_of_children(self, context);
        // }
    }
}

impl<T> SizeOf for &mut T
where
    T: SizeOf + ?Sized,
{
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {
        // Mutable references are exclusive, so there should only ever be one
        // of them
        //
        // context.add(size_of_val(*self));
        // T::total_size_of_children(self, context);
    }
}

impl<T: ?Sized> SizeOf for *const T {
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

impl<T: ?Sized> SizeOf for *mut T {
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

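/// An illustrative sketch of what this impl records (the `Context::new`
/// constructor below is an assumption for the example; it isn't defined in
/// this file):
///
/// ```ignore
/// let boxed: Box<[u8; 32]> = Box::new([0; 32]);
///
/// let mut context = Context::new(); // assumed constructor
/// boxed.size_of_children(&mut context);
///
/// // Expected accounting: 32 bytes for the heap-allocated pointee plus one
/// // distinct allocation. A zero-sized pointee would add neither.
/// ```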
impl<T> SizeOf for Box<T>
where
    T: SizeOf + ?Sized,
{
    fn size_of_children(&self, context: &mut Context) {
        // Count the heap space occupied by the pointee; zero-sized values
        // don't allocate, so they contribute neither bytes nor an allocation
        let size = size_of_val(self.as_ref());
        if size != 0 {
            context.add(size).add_distinct_allocation();
        }

        // Recurse into whatever the boxed value itself owns
        T::size_of_children(self, context);
    }
}

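/// An illustrative sketch of the deduplication below (the `Context::new`
/// constructor is an assumption for the example; it isn't defined in this
/// file):
///
/// ```ignore
/// let first: Rc<u64> = Rc::new(0);
/// let second = Rc::clone(&first);
///
/// let mut context = Context::new(); // assumed constructor
/// first.size_of_children(&mut context);
/// second.size_of_children(&mut context);
///
/// // The second call finds the pointer already recorded by `insert_rc` and
/// // adds nothing, so the shared `u64` and its allocation are counted once.
/// ```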
impl<T> SizeOf for Rc<T>
where
    T: SizeOf + ?Sized,
{
    fn size_of_children(&self, context: &mut Context) {
        // Only count the shared allocation the first time this pointer is seen
        if context.insert_rc(self) {
            context
                .shared(|ctx| {
                    ctx.add(size_of_val(self.as_ref()));
                    T::size_of_children(self, ctx);
                })
                .add_distinct_allocation();
        }
    }
}

// Weak refs aren't owned
// TODO: Should we record the data pointed to by weak refs as shared?
impl<T: ?Sized> SizeOf for RcWeak<T> {
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

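/// An illustrative sketch mirroring the `Rc` example above, but tracked via
/// `insert_arc`; downgraded handles hit the childless `Weak` impls and record
/// nothing (`Context::new` is an assumed constructor):
///
/// ```ignore
/// let shared: Arc<[u32; 4]> = Arc::new([0; 4]);
/// let weak = Arc::downgrade(&shared);
///
/// let mut context = Context::new(); // assumed constructor
/// shared.size_of_children(&mut context); // 16 shared bytes + 1 allocation
/// weak.size_of_children(&mut context);   // no-op: weak refs aren't owned
/// ```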
impl<T> SizeOf for Arc<T>
where
    T: SizeOf + ?Sized,
{
    fn size_of_children(&self, context: &mut Context) {
        // Mirrors the `Rc` impl above, deduplicating via `insert_arc`
        if context.insert_arc(self) {
            context
                .shared(|ctx| {
                    ctx.add(size_of_val(self.as_ref()));
                    T::size_of_children(self, ctx);
                })
                .add_distinct_allocation();
        }
    }
}

// Weak refs aren't owned
// TODO: Should we record the data pointed to by weak refs as shared?
impl<T: ?Sized> SizeOf for ArcWeak<T> {
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

impl<T: ?Sized> SizeOf for NonNull<T> {
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

impl_total_size_childless! {
    AtomicPtr<T>,
}