1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
use crate::{Context, SizeOf};
use alloc::{
    boxed::Box,
    rc::{Rc, Weak as RcWeak},
    sync::{Arc, Weak as ArcWeak},
};
use core::{mem::size_of_val, ptr::NonNull, sync::atomic::AtomicPtr};

// TODO: Do we want to traverse all *accessible* memory or all *owned* memory?
impl<T> SizeOf for &T
where
    T: SizeOf + ?Sized,
{
    /// Shared references do not *own* their pointee, so no child memory is
    /// attributed to them. The commented-out alternative below is the
    /// "accessible memory" policy from the TODO above: count the referent
    /// once per distinct reference seen.
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {
        // // Only record the data behind the ref we've never seen it before
        // if context.insert_ref(self) {
        //     context.add(size_of_val(*self));
        //     T::total_size_of_children(self, context);
        // }
    }
}

impl<T> SizeOf for &mut T
where
    T: SizeOf + ?Sized,
{
    /// Like `&T`, a mutable reference does not own its pointee, so nothing
    /// is recorded here. The owner of the referent is responsible for
    /// counting it.
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {
        // Mutable references are exclusive so there should only ever be one of
        // them
        //
        // context.add(size_of_val(*self));
        // T::total_size_of_children(self, context);
    }
}

impl<T: ?Sized> SizeOf for *const T {
    /// Raw pointers carry no ownership, and dereferencing them here would be
    /// unsound anyway, so no child memory is recorded.
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

impl<T: ?Sized> SizeOf for *mut T {
    /// Same reasoning as `*const T`: raw pointers own nothing, so this is a
    /// no-op.
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

impl<T> SizeOf for Box<T>
where
    T: SizeOf + ?Sized,
{
    /// Records the heap allocation a `Box` uniquely owns, then recurses
    /// into the boxed value's own children.
    fn size_of_children(&self, context: &mut Context) {
        // A `Box` is the sole owner of its allocation, so no sharing
        // bookkeeping is required: attribute the pointee's size plus one
        // distinct allocation, and let the pointee report its children.
        let pointee: &T = self;
        context.add(size_of_val(pointee)).add_distinct_allocation();
        pointee.size_of_children(context);
    }
}

impl<T> SizeOf for Rc<T>
where
    T: SizeOf + ?Sized,
{
    /// Records the shared allocation behind an `Rc` exactly once, no matter
    /// how many clones of the pointer the traversal encounters.
    fn size_of_children(&self, context: &mut Context) {
        // Guard clause: if this allocation was already seen, a previous
        // clone has accounted for it and there is nothing left to do.
        if !context.insert_rc(self) {
            return;
        }

        // First visit: record the pointee (and its children) as shared
        // memory, plus the single backing allocation.
        context
            .shared(|ctx| {
                ctx.add(size_of_val(&**self));
                (**self).size_of_children(ctx);
            })
            .add_distinct_allocation();
    }
}

// Weak refs aren't owned
// TODO: Should we record the data pointed to by weak refs as shared?
impl<T: ?Sized> SizeOf for RcWeak<T> {
    /// A `Weak` does not keep its target alive and is not considered an
    /// owner, so no child memory is recorded (see the TODO above).
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

impl<T> SizeOf for Arc<T>
where
    T: SizeOf + ?Sized,
{
    /// Records the shared allocation behind an `Arc` exactly once, no matter
    /// how many clones of the pointer the traversal encounters.
    fn size_of_children(&self, context: &mut Context) {
        // Guard clause: a repeat sighting of this allocation means an
        // earlier clone already accounted for it.
        if !context.insert_arc(self) {
            return;
        }

        // First visit: record the pointee (and its children) as shared
        // memory, plus the single backing allocation.
        context
            .shared(|ctx| {
                ctx.add(size_of_val(&**self));
                (**self).size_of_children(ctx);
            })
            .add_distinct_allocation();
    }
}

// Weak refs aren't owned
// TODO: Should we record the data pointed to by weak refs as shared?
impl<T: ?Sized> SizeOf for ArcWeak<T> {
    /// Same policy as `rc::Weak`: weak refs are not owners, so this is a
    /// no-op (see the TODO above).
    #[inline]
    fn size_of_children(&self, _context: &mut Context) {}
}

// NOTE(review): this macro presumably expands to no-op `SizeOf` impls for
// pointer-like types that own no child memory — confirm at the macro's
// definition.
impl_total_size_childless! {
    NonNull<T>,
    AtomicPtr<T>,
}