// File: tracking_allocator/tracing.rs

1use std::{any::TypeId, marker::PhantomData, ptr::addr_of};
2
3use tracing::{Dispatch, Id, Subscriber};
4use tracing_subscriber::{layer::Context, registry::LookupSpan, Layer};
5
6use crate::token::UnsafeAllocationGroupToken;
7
/// Type-erased hook for attaching an [`UnsafeAllocationGroupToken`] to a span.
///
/// Holds a plain `fn` pointer so that callers who only have a `&Dispatch` can
/// reach the subscriber-specific logic via `Layer::downcast_raw` without
/// naming the concrete subscriber type `S`.
pub(crate) struct WithAllocationGroup {
    // Concrete instantiation of `AllocationLayer::<S>::with_allocation_group`,
    // captured when the layer is constructed.
    with_allocation_group: fn(&Dispatch, &Id, UnsafeAllocationGroupToken),
}
11
12impl WithAllocationGroup {
13    pub fn with_allocation_group(
14        &self,
15        dispatch: &Dispatch,
16        id: &Id,
17        unsafe_token: UnsafeAllocationGroupToken,
18    ) {
19        (self.with_allocation_group)(dispatch, id, unsafe_token);
20    }
21}
22
/// [`AllocationLayer`] is a [`tracing_subscriber::Layer`] that handles entering and exiting an allocation
/// group as the span it is attached to is itself entered and exited.
///
/// More information on using this layer can be found in the examples, or directly in the
/// `tracing_subscriber` docs, found [here][tracing_subscriber::layer].
#[cfg_attr(docsrs, doc(cfg(feature = "tracing-compat")))]
pub struct AllocationLayer<S> {
    // Type-erased hook handed out through `downcast_raw` so code holding only
    // a `Dispatch` can attach allocation-group tokens to spans.
    ctx: WithAllocationGroup,
    // Ties the layer to a subscriber type `S` without storing one; `fn(S)` is
    // used so the marker stays `Send + Sync` regardless of `S`.
    _subscriber: PhantomData<fn(S)>,
}
33
34impl<S> AllocationLayer<S>
35where
36    S: Subscriber + for<'span> LookupSpan<'span>,
37{
38    /// Creates a new [`AllocationLayer`].
39    #[must_use]
40    pub fn new() -> Self {
41        let ctx = WithAllocationGroup {
42            with_allocation_group: Self::with_allocation_group,
43        };
44
45        Self {
46            ctx,
47            _subscriber: PhantomData,
48        }
49    }
50
51    fn with_allocation_group(
52        dispatch: &Dispatch,
53        id: &Id,
54        unsafe_token: UnsafeAllocationGroupToken,
55    ) {
56        let subscriber = dispatch
57            .downcast_ref::<S>()
58            .expect("subscriber should downcast to expected type; this is a bug!");
59        let span = subscriber
60            .span(id)
61            .expect("registry should have a span for the current ID");
62
63        span.extensions_mut().insert(unsafe_token);
64    }
65}
66
67impl<S> Layer<S> for AllocationLayer<S>
68where
69    S: Subscriber + for<'a> LookupSpan<'a>,
70{
71    fn on_enter(&self, id: &Id, ctx: Context<'_, S>) {
72        if let Some(span_ref) = ctx.span(id) {
73            if let Some(token) = span_ref
74                .extensions_mut()
75                .get_mut::<UnsafeAllocationGroupToken>()
76            {
77                token.enter();
78            }
79        }
80    }
81
82    fn on_exit(&self, id: &Id, ctx: Context<'_, S>) {
83        if let Some(span_ref) = ctx.span(id) {
84            if let Some(token) = span_ref
85                .extensions_mut()
86                .get_mut::<UnsafeAllocationGroupToken>()
87            {
88                token.exit();
89            }
90        }
91    }
92
93    unsafe fn downcast_raw(&self, id: TypeId) -> Option<*const ()> {
94        match id {
95            id if id == TypeId::of::<Self>() => Some(addr_of!(self).cast::<()>()),
96            id if id == TypeId::of::<WithAllocationGroup>() => {
97                Some(addr_of!(self.ctx).cast::<()>())
98            }
99            _ => None,
100        }
101    }
102}
103
104impl<S> Default for AllocationLayer<S>
105where
106    S: Subscriber + for<'span> LookupSpan<'span>,
107{
108    fn default() -> Self {
109        AllocationLayer::new()
110    }
111}