// rspack_allocative/visitor.rs

/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is dual-licensed under either the MIT license found in the
 * LICENSE-MIT file in the root directory of this source tree or the Apache
 * License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 * of this source tree. You may select, at your option, one of the
 * above-listed licenses.
 */
10
11use std::mem;
12
13use crate::allocative_trait::Allocative;
14use crate::impls::common::CAPACITY_NAME;
15use crate::impls::common::DATA_NAME;
16use crate::impls::common::KEY_NAME;
17use crate::impls::common::UNUSED_CAPACITY_NAME;
18use crate::impls::common::VALUE_NAME;
19use crate::key::Key;
20
/// Actual implementation of the visitor.
///
/// At the moment there's only one implementation, the one which generates flame graph,
/// and this trait is crate-private. This may change in the future.
pub(crate) trait VisitorImpl {
    /// Enter simple field like `u32`.
    /// All sizes are in bytes.
    fn enter_inline_impl(&mut self, name: Key, size: usize, parent: NodeKind);
    /// Enter field which points to heap-allocated unique memory (e.g. `Box<T>`).
    fn enter_unique_impl(&mut self, name: Key, size: usize, parent: NodeKind);
    /// Enter field which points to heap-allocated shared memory (e.g. `Arc<T>`).
    /// This function returns `false` if pointee already visited.
    #[must_use]
    fn enter_shared_impl(
        &mut self,
        name: Key,
        size: usize,
        ptr: *const (),
        parent: NodeKind,
    ) -> bool;

    /// Exit the field. Each `enter_` must be matched by `exit_`.
    /// `Visitor` wrapper guarantees that.
    fn exit_inline_impl(&mut self);
    /// Exit a node entered with `enter_unique_impl`.
    fn exit_unique_impl(&mut self);
    /// Exit a node entered with `enter_shared_impl` (only paired with calls
    /// that returned `true`; `Visitor::enter_shared` returns `None` otherwise).
    fn exit_shared_impl(&mut self);
    /// Exit the "root" visitor, i.e. finish the whole visit.
    fn exit_root_impl(&mut self);
}
50
/// Kind of the node a `Visitor` currently sits at; selects which
/// `enter_*_impl`/`exit_*_impl` pair of [`VisitorImpl`] is invoked.
#[derive(Copy, Clone)]
pub(crate) enum NodeKind {
    /// Simple inline data, e.g. a `u32` stored directly in the parent.
    Inline,
    /// Uniquely owned heap allocation, e.g. a `Box<T>` pointee.
    Unique,
    /// Shared heap allocation, e.g. an `Arc<T>` pointee.
    Shared,
    /// The root of the whole visit.
    Root,
}
58
/// Borrowing wrapper over a [`VisitorImpl`] that enforces balanced
/// enter/exit calls: each child `Visitor` records its exit either via
/// [`Visitor::exit`] or in `Drop`.
#[must_use] // Must call `.exit()`.
pub struct Visitor<'a> {
    /// Underlying implementation that records the visited nodes.
    pub(crate) visitor: &'a mut dyn VisitorImpl,
    /// Kind of the node this visitor was created for; passed as `parent`
    /// to nested `enter_*` calls and consulted when exiting.
    pub(crate) node_kind: NodeKind,
}
64
65impl Drop for Visitor<'_> {
66    fn drop(&mut self) {
67        self.exit_impl();
68    }
69}
70
impl<'a> Visitor<'a> {
    /// Enter a simple inline field (e.g. a `u32` stored directly in the parent).
    ///
    /// `size` is in bytes. The returned child visitor must be finished with
    /// [`Visitor::exit`] (dropping it also records the exit).
    pub fn enter<'b>(&'b mut self, name: Key, size: usize) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.visitor.enter_inline_impl(name, size, self.node_kind);
        Visitor {
            visitor: self.visitor,
            node_kind: NodeKind::Inline,
        }
    }

    /// Enter a field which points to uniquely owned heap memory
    /// (e.g. a `Box<T>` pointee). `size` is in bytes.
    pub fn enter_unique<'b>(&'b mut self, name: Key, size: usize) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.visitor.enter_unique_impl(name, size, self.node_kind);
        Visitor {
            visitor: self.visitor,
            node_kind: NodeKind::Unique,
        }
    }

    /// Enter a field containing a shared pointer.
    ///
    /// This function does nothing and returns `None`
    /// if pointee (`ptr` argument) was previously visited.
    pub fn enter_shared<'b>(
        &'b mut self,
        name: Key,
        size: usize,
        ptr: *const (),
    ) -> Option<Visitor<'b>>
    where
        'a: 'b,
    {
        if self
            .visitor
            .enter_shared_impl(name, size, ptr, self.node_kind)
        {
            Some(Visitor {
                visitor: self.visitor,
                node_kind: NodeKind::Shared,
            })
        } else {
            None
        }
    }

    /// This function is typically called as the first function of an `Allocative`
    /// implementation to record self.
    ///
    /// Records `mem::size_of::<T>()` bytes under `T`'s type name.
    pub fn enter_self_sized<'b, T>(&'b mut self) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.enter(Key::for_type_name::<T>(), mem::size_of::<T>())
    }

    /// This function is typically called as first function of an `Allocative`
    /// implementation to record self.
    ///
    /// Like [`Visitor::enter_self_sized`], but works for unsized `T` by
    /// measuring the concrete value with `mem::size_of_val`.
    pub fn enter_self<'b, T: ?Sized>(&'b mut self, this: &T) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.enter(Key::for_type_name::<T>(), mem::size_of_val(this))
    }

    /// Visit simple sized field (e.g. `u32`) without descending into children.
    pub fn visit_simple<'b>(&'b mut self, name: Key, size: usize)
    where
        'a: 'b,
    {
        self.enter(name, size).exit();
    }

    /// Visit simple sized field (e.g. `u32`) without descending into children.
    /// The name and size are taken from the type `T` itself.
    pub fn visit_simple_sized<'b, T>(&'b mut self)
    where
        'a: 'b,
    {
        self.enter_self_sized::<T>().exit();
    }

    /// Visit a named field by delegating to the field's own
    /// [`Allocative::visit`] implementation.
    pub fn visit_field<'b, T: Allocative + ?Sized>(&'b mut self, name: Key, field: &T)
    where
        'a: 'b,
    {
        self.visit_field_with(name, mem::size_of_val::<T>(field), |visitor| {
            field.visit(visitor);
        })
    }

    /// Similar to `visit_field` but instead of calling [`Allocative::visit`] for
    /// whichever is the field type, you can provide a custom closure to call
    /// instead.
    ///
    /// Useful if the field type does not implement [`Allocative`].
    // NOTE(review): lifetime parameter `'f` appears unused in this signature —
    // presumably vestigial; confirm before removing (clippy: extra_unused_lifetimes).
    pub fn visit_field_with<'b, 'f, F: for<'c, 'd> FnOnce(&'d mut Visitor<'c>)>(
        &'b mut self,
        name: Key,
        field_size: usize,
        visit: F,
    ) {
        let mut visitor = self.enter(name, field_size);
        visit(&mut visitor);
        visitor.exit();
    }

    /// Visit the elements of a slice; see [`Visitor::visit_iter`] for the
    /// fast path taken when `T` owns no heap data.
    pub fn visit_slice<'b, T: Allocative>(&'b mut self, slice: &[T])
    where
        'a: 'b,
    {
        self.visit_iter(slice);
    }

    /// Visit each element of an iterator of `&T`.
    ///
    /// When `T` needs no drop (or is zero-sized) the elements cannot own
    /// heap allocations, so the whole run is recorded as a single simple
    /// node of `count * size_of::<T>()` bytes instead of visiting each element.
    pub fn visit_iter<'b, 'i, T: Allocative + 'i, I: IntoIterator<Item = &'i T>>(
        &'b mut self,
        iter: I,
    ) where
        'a: 'b,
    {
        if !mem::needs_drop::<T>() || mem::size_of::<T>() == 0 {
            // `T` has no pointers it owns.
            self.visit_simple(
                Key::for_type_name::<T>(),
                mem::size_of::<T>() * iter.into_iter().count(),
            );
        } else {
            for item in iter {
                item.visit(self);
            }
        }
    }

    /// Record a `Vec`-like buffer: a capacity node containing the initialized
    /// elements (`data`) plus an "unused capacity" node for the spare slots.
    pub fn visit_vec_like_body<'b, T>(&'b mut self, data: &[T], capacity: usize)
    where
        'a: 'b,
        T: Allocative,
    {
        self.visit_field_with(CAPACITY_NAME, mem::size_of::<T>() * capacity, |visitor| {
            visitor.visit_slice(data);
            // NOTE(review): `wrapping_sub` presumably guards against a caller
            // reporting `capacity < data.len()`; for a well-formed vec this is
            // just `capacity - data.len()`. Confirm against callers.
            visitor.visit_simple(
                UNUSED_CAPACITY_NAME,
                mem::size_of::<T>() * capacity.wrapping_sub(data.len()),
            );
        })
    }

    /// Record the entries of a map-like container under a single data node.
    ///
    /// The data node itself is given pointer size (standing in for the heap
    /// data pointer); the actual contents are attributed to the nested
    /// key/value nodes visited per entry.
    pub fn visit_generic_map_fields<'b, 'x, K: Allocative + 'x, V: Allocative + 'x>(
        &'b mut self,
        entries: impl IntoIterator<Item = (&'x K, &'x V)>,
    ) {
        self.visit_field_with(DATA_NAME, mem::size_of::<*const ()>(), move |visitor| {
            for (k, v) in entries {
                visitor.visit_field(KEY_NAME, k);
                visitor.visit_field(VALUE_NAME, v);
            }
        })
    }

    /// Record the entries of a set-like container under a single data node;
    /// same layout convention as [`Visitor::visit_generic_map_fields`] but
    /// with keys only.
    pub fn visit_generic_set_fields<'b, 'x, K: Allocative + 'x>(
        &'b mut self,
        entries: impl IntoIterator<Item = &'x K>,
    ) where
        'a: 'b,
    {
        self.visit_field_with(DATA_NAME, mem::size_of::<*const ()>(), |visitor| {
            for k in entries {
                visitor.visit_field(KEY_NAME, k);
            }
        })
    }

    /// Dispatch the exit call matching the `enter_*` that created this node,
    /// based on the `node_kind` recorded at construction.
    fn exit_impl(&mut self) {
        match self.node_kind {
            NodeKind::Inline => self.visitor.exit_inline_impl(),
            NodeKind::Unique => self.visitor.exit_unique_impl(),
            NodeKind::Shared => self.visitor.exit_shared_impl(),
            NodeKind::Root => self.visitor.exit_root_impl(),
        }
    }

    /// Finish this node. Consumes the visitor so the exit is recorded
    /// exactly once.
    pub fn exit(mut self) {
        self.exit_impl();
        // Prevent `drop` from recording the exit a second time.
        mem::forget(self);
    }
}