// aiscript_arena/context.rs
1use alloc::{boxed::Box, vec::Vec};
2use core::{
3 cell::{Cell, RefCell},
4 mem,
5 ops::Deref,
6 ptr::NonNull,
7};
8
9use crate::{
10 Gc,
11 collect::Collect,
12 metrics::Metrics,
13 types::{GcBox, GcBoxHeader, GcBoxInner, GcColor, Invariant},
14};
15
/// Handle value given by arena callbacks during construction and mutation. Allows allocating new
/// `Gc` pointers and internally mutating values held by `Gc` pointers.
// `repr(transparent)` over `Context` is what makes the reference transmute in
// `Context::mutation_context` sound.
#[repr(transparent)]
pub struct Mutation<'gc> {
    context: Context,
    // Makes `'gc` invariant, so `Gc` pointers cannot be smuggled between
    // arenas via lifetime variance.
    _invariant: Invariant<'gc>,
}
23
impl<'gc> Mutation<'gc> {
    /// Access the arena's collection metrics (allocation totals and debt).
    #[inline]
    pub fn metrics(&self) -> &Metrics {
        self.context.metrics()
    }

    /// IF we are in the marking phase AND the `parent` pointer is colored black AND the `child` (if
    /// given) is colored white, then change the `parent` color to gray and enqueue it for tracing.
    ///
    /// This operation is known as a "backwards write barrier". Calling this method is one of the
    /// safe ways for the value in the `parent` pointer to use internal mutability to adopt the
    /// `child` pointer without invalidating the color invariant.
    ///
    /// If the `child` parameter is given, then calling this method ensures that the `parent`
    /// pointer may safely adopt the `child` pointer. If no `child` is given, then calling this
    /// method is more general, and it ensures that the `parent` pointer may adopt *any* child
    /// pointer(s) before collection is next triggered.
    #[inline]
    pub fn backward_barrier(&self, parent: Gc<'gc, ()>, child: Option<Gc<'gc, ()>>) {
        self.context.backward_barrier(
            unsafe { GcBox::erase(parent.ptr) },
            child.map(|p| unsafe { GcBox::erase(p.ptr) }),
        )
    }

    /// IF we are in the marking phase AND the `parent` pointer (if given) is colored black, AND
    /// the `child` is colored white, then immediately change the `child` to gray and enqueue it
    /// for tracing.
    ///
    /// This operation is known as a "forwards write barrier". Calling this method is one of the
    /// safe ways for the value in the `parent` pointer to use internal mutability to adopt the
    /// `child` pointer without invalidating the color invariant.
    ///
    /// If the `parent` parameter is given, then calling this method ensures that the `parent`
    /// pointer may safely adopt the `child` pointer. If no `parent` is given, then calling this
    /// method is more general, and it ensures that the `child` pointer may be adopted by *any*
    /// parent pointer(s) before collection is next triggered.
    #[inline]
    pub fn forward_barrier(&self, parent: Option<Gc<'gc, ()>>, child: Gc<'gc, ()>) {
        self.context
            .forward_barrier(parent.map(|p| unsafe { GcBox::erase(p.ptr) }), unsafe {
                GcBox::erase(child.ptr)
            })
    }

    // Allocates a new `GcBox` holding `t` inside the arena; see `Context::allocate`.
    #[inline]
    pub(crate) fn allocate<T: Collect + 'gc>(&self, t: T) -> NonNull<GcBoxInner<T>> {
        self.context.allocate(t)
    }

    // Returns whether `gc_box` may be safely upgraded from a weak pointer to a
    // strong one; see `Context::upgrade` for the full reasoning.
    #[inline]
    pub(crate) fn upgrade(&self, gc_box: GcBox) -> bool {
        self.context.upgrade(gc_box)
    }
}
79
/// Handle value given to finalization callbacks in `MarkedArena`.
///
/// Derefs to `Mutation<'gc>` to allow for arbitrary mutation, but adds additional powers to examine
/// the state of the fully marked arena.
// `repr(transparent)` over `Context` makes both the `Deref` transmute below
// and `Context::finalization_context` sound.
#[repr(transparent)]
pub struct Finalization<'gc> {
    context: Context,
    // Makes `'gc` invariant (same role it plays in `Mutation`).
    _invariant: Invariant<'gc>,
}
89
impl<'gc> Deref for Finalization<'gc> {
    type Target = Mutation<'gc>;

    fn deref(&self) -> &Self::Target {
        // SAFETY: Finalization and Mutation are #[repr(transparent)] wrappers
        // around the same `Context`, so their references share one layout.
        unsafe { mem::transmute::<&Self, &Mutation>(self) }
    }
}
98
impl Finalization<'_> {
    // Marks a condemned (white) object as reachable again during finalization;
    // see `Context::resurrect`.
    #[inline]
    pub(crate) fn resurrect(&self, gc_box: GcBox) {
        self.context.resurrect(gc_box)
    }
}
105
/// Handle value given by arena callbacks during garbage collection, which must be passed through
/// `Collect::trace` implementations.
// `repr(transparent)` over `Context` makes the reference transmute in
// `Context::collection_context` sound.
#[repr(transparent)]
pub struct Collection {
    context: Context,
}
112
impl Collection {
    /// Access the arena's collection metrics.
    #[inline]
    pub fn metrics(&self) -> &Metrics {
        self.context.metrics()
    }

    // Marks `gc_box` as reached through a strong pointer; see `Context::trace`.
    #[inline]
    pub(crate) fn trace(&self, gc_box: GcBox) {
        self.context.trace(gc_box)
    }

    // Marks `gc_box` as observed through a weak pointer; see `Context::trace_weak`.
    #[inline]
    pub(crate) fn trace_weak(&self, gc_box: GcBox) {
        self.context.trace_weak(gc_box)
    }
}
129
/// The phase of the incremental collection cycle the `Context` is in.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) enum Phase {
    /// Tracing reachable objects and turning them black.
    Mark,
    /// Walking the `sweep` list, freeing white objects and whitening black ones.
    Sweep,
    /// No collection in progress; waiting for allocation debt to accumulate.
    Sleep,
    /// The `Context` itself is being dropped and is freeing every allocation.
    Drop,
}
137
/// Requests that `Context::do_collection` halt a cycle early at a specific
/// point, instead of running the sweep to completion.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) enum EarlyStop {
    /// Finish marking but stop before transitioning to `Phase::Sweep`.
    BeforeSweep,
    /// Stop as soon as `Phase::Sweep` is reached, before freeing anything.
    AfterSweep,
}
143
/// The shared state of an arena's incremental garbage collector. The public
/// handle types (`Mutation`, `Collection`, `Finalization`) are all
/// `repr(transparent)` wrappers around this.
pub(crate) struct Context {
    // Collection statistics and allocation-debt accounting for this arena.
    metrics: Metrics,
    // The current phase of the collection cycle.
    phase: Cell<Phase>,
    // The tracing span for the current phase; swapped in and out by `PhaseGuard`.
    #[cfg(feature = "tracing")]
    phase_span: Cell<tracing::Span>,

    // True when the root object may hold untraced pointers; set by
    // `root_barrier` during marking and whenever a new cycle starts.
    root_needs_trace: Cell<bool>,

    // A linked list of all allocated `GcBox`es.
    all: Cell<Option<GcBox>>,

    // A copy of the head of `all` at the end of `Phase::Mark`.
    // During `Phase::Sweep`, we free all white allocations on this list.
    // Any allocations created *during* `Phase::Sweep` will be added to `all`,
    // but `sweep` will *not* be updated. This ensures that we keep allocations
    // alive until we've had a chance to trace them.
    sweep: Cell<Option<GcBox>>,

    // The most recent black object that we encountered during `Phase::Sweep`.
    // When we free objects, we update this `GcBox.next` to remove them from
    // the linked list.
    sweep_prev: Cell<Option<GcBox>>,

    // A queue of gray objects, used during `Phase::Mark`.
    // This holds traceable objects that have yet to be traced.
    // (The root itself is not queued here; it is re-traced directly whenever
    // `root_needs_trace` is set.)
    gray: RefCell<Vec<GcBox>>,

    // A queue of gray objects that became gray as a result
    // of a write barrier.
    gray_again: RefCell<Vec<GcBox>>,
}
176
impl Drop for Context {
    fn drop(&mut self) {
        // Frees every remaining allocation on the `all` list. This is written
        // as a guard type so that if dropping one object's value panics, the
        // guard's own `Drop` re-enters the loop and still frees the remaining
        // objects.
        struct DropAll<'a>(&'a Metrics, Option<GcBox>);

        impl Drop for DropAll<'_> {
            fn drop(&mut self) {
                if let Some(gc_box) = self.1.take() {
                    // `drop_resume` holds the unfreed tail; if `free_gc_box`
                    // panics below, dropping `drop_resume` resumes from there.
                    let mut drop_resume = DropAll(self.0, Some(gc_box));
                    while let Some(gc_box) = drop_resume.1.take() {
                        let header = gc_box.header();
                        drop_resume.1 = header.next();
                        self.0.mark_gc_deallocated(header.size_of_box());
                        // SAFETY: the context owns its GC'd objects
                        unsafe { free_gc_box(gc_box) }
                    }
                }
            }
        }

        let _guard = PhaseGuard::enter(self, Some(Phase::Drop));
        // This temporary is dropped at the end of the statement, which is what
        // actually runs the freeing loop above.
        DropAll(&self.metrics, self.all.get());
    }
}
200
201impl Context {
    /// Construct a fresh `Context` in `Phase::Sleep` with an empty allocation
    /// list and empty gray queues.
    ///
    /// NOTE(review): marked `unsafe` presumably because the arena constructor
    /// must pair this context with a unique branding lifetime — TODO confirm
    /// against the arena module.
    pub(crate) unsafe fn new() -> Context {
        let metrics = Metrics::new();
        Context {
            phase: Cell::new(Phase::Sleep),
            #[cfg(feature = "tracing")]
            phase_span: Cell::new(PhaseGuard::span_for(&metrics, Phase::Sleep)),
            metrics,
            root_needs_trace: Cell::new(true),
            all: Cell::new(None),
            sweep: Cell::new(None),
            sweep_prev: Cell::new(None),
            gray: RefCell::new(Vec::new()),
            gray_again: RefCell::new(Vec::new()),
        }
    }
217
    /// Reinterpret this `Context` as a `Mutation` handle with a caller-chosen
    /// branding lifetime.
    ///
    /// SAFETY: the caller must supply the arena's own `'gc` lifetime; the
    /// transmute itself is sound because `Mutation` is `repr(transparent)`
    /// over `Context`.
    #[inline]
    pub(crate) unsafe fn mutation_context<'gc>(&self) -> &Mutation<'gc> {
        unsafe { mem::transmute::<&Self, &Mutation>(self) }
    }
222
    /// Reinterpret this `Context` as a `Collection` handle for tracing callbacks.
    #[inline]
    fn collection_context(&self) -> &Collection {
        // SAFETY: `Collection` is `repr(transparent)` over `Context`.
        unsafe { mem::transmute::<&Self, &Collection>(self) }
    }
228
    /// Reinterpret this `Context` as a `Finalization` handle with a
    /// caller-chosen branding lifetime.
    ///
    /// SAFETY: same contract as `mutation_context`; sound because
    /// `Finalization` is `repr(transparent)` over `Context`.
    #[inline]
    pub(crate) unsafe fn finalization_context<'gc>(&self) -> &Finalization<'gc> {
        unsafe { mem::transmute::<&Self, &Finalization>(self) }
    }
233
    /// Borrow the arena's collection metrics.
    #[inline]
    pub(crate) fn metrics(&self) -> &Metrics {
        &self.metrics
    }
238
239 #[inline]
240 pub(crate) fn root_barrier(&self) {
241 if self.phase.get() == Phase::Mark {
242 self.root_needs_trace.set(true);
243 }
244 }
245
    /// The collection phase this context is currently in.
    #[inline]
    pub(crate) fn phase(&self) -> Phase {
        self.phase.get()
    }
250
251 #[inline]
252 pub(crate) fn gray_remaining(&self) -> bool {
253 !self.gray.borrow().is_empty()
254 || !self.gray_again.borrow().is_empty()
255 || self.root_needs_trace.get()
256 }
257
    // Do some collection work until either the debt goes down below the target amount or we have
    // finished the gc sweep phase. The unit of "work" here is a byte count of objects either turned
    // black or freed, so to completely collect a heap with 1000 bytes of objects should take 1000
    // units of work, whatever percentage of them are live or not.
    //
    // In order for this to be safe, at the time of call no `Gc` pointers can be live that are not
    // reachable from the given root object.
    //
    // If we are currently in `Phase::Sleep`, this will transition the collector to `Phase::Mark`.
    pub(crate) unsafe fn do_collection<R: Collect + ?Sized>(
        &self,
        root: &R,
        target_debt: f64,
        early_stop: Option<EarlyStop>,
    ) {
        self.do_collection_inner(root, target_debt, early_stop)
    }
275
    // The collection state machine behind `do_collection`. Runs until the
    // allocation debt drops to `target_debt` or below; `target_debt` is set to
    // infinity internally to force an exit (early stop or end of cycle).
    fn do_collection_inner<R: Collect + ?Sized>(
        &self,
        root: &R,
        mut target_debt: f64,
        early_stop: Option<EarlyStop>,
    ) {
        let mut entered = PhaseGuard::enter(self, None);

        // Negated `>` rather than `<=` so that a NaN debt also pauses.
        if !(self.metrics.allocation_debt() > target_debt) {
            entered.log_progress("GC: paused");
            return;
        }

        loop {
            match self.phase.get() {
                Phase::Sleep => {
                    // Immediately enter the mark phase; no need to update metrics here.
                    entered.switch(Phase::Mark);
                    continue;
                }
                Phase::Mark => {
                    // We look for an object first in the normal gray queue, then the "gray again"
                    // queue. Objects from the normal gray queue count as regular work, but objects
                    // which are gray a second time have already been counted as work, so we don't
                    // double count them. Processing "gray again" objects later also gives them more
                    // time to be mutated again without triggering another write barrier.
                    let next_gray = if let Some(gc_box) = self.gray.borrow_mut().pop() {
                        self.metrics.mark_gc_traced(gc_box.header().size_of_box());
                        Some(gc_box)
                    } else {
                        self.gray_again.borrow_mut().pop()
                    };

                    if let Some(gc_box) = next_gray {
                        // If we have an object in the gray queue, take one, trace it, and turn it
                        // black.

                        // Our `Collect::trace` call may panic, and if it does the object will be
                        // lost from the gray queue but potentially incompletely traced. By catching
                        // a panic during `Arena::collect()`, this could lead to memory unsafety.
                        //
                        // So, if the `Collect::trace` call panics, we need to add the popped object
                        // back to the `gray_again` queue. If the panic is caught, this will maybe
                        // give it some time to not panic before attempting to collect it again, and
                        // also this doesn't invalidate the collection debt math.
                        struct DropGuard<'a> {
                            cx: &'a Context,
                            gc_box: GcBox,
                        }

                        impl Drop for DropGuard<'_> {
                            fn drop(&mut self) {
                                self.cx.gray_again.borrow_mut().push(self.gc_box);
                            }
                        }

                        let guard = DropGuard { cx: self, gc_box };
                        debug_assert!(gc_box.header().is_live());
                        unsafe { gc_box.trace_value(self.collection_context()) }
                        gc_box.header().set_color(GcColor::Black);
                        // Tracing succeeded, so the panic guard must not re-queue the box.
                        mem::forget(guard);
                    } else if self.root_needs_trace.get() {
                        // We treat the root object as gray if `root_needs_trace` is set, and we
                        // process it at the end of the gray queue for the same reason as the "gray
                        // again" objects.
                        root.trace(self.collection_context());
                        self.root_needs_trace.set(false);
                    } else if early_stop == Some(EarlyStop::BeforeSweep) {
                        // Requested to stop before sweeping; force the loop to exit.
                        target_debt = f64::INFINITY;
                    } else {
                        // If we have no gray objects left, we enter the sweep phase.
                        entered.switch(Phase::Sweep);

                        // Set `sweep` to the current head of our `all` linked list. Any new
                        // allocations during the newly-entered `Phase::Sweep` will update `all`,
                        // but will *not* be reachable from `self.sweep`.
                        self.sweep.set(self.all.get());

                        // No need to update metrics here.
                        continue;
                    }
                }
                Phase::Sweep => {
                    if early_stop == Some(EarlyStop::AfterSweep) {
                        // Requested to stop as soon as sweeping starts; force the loop to exit.
                        target_debt = f64::INFINITY;
                    } else if let Some(mut sweep) = self.sweep.get() {
                        let sweep_header = sweep.header();

                        let next_box = sweep_header.next();
                        self.sweep.set(next_box);

                        match sweep_header.color() {
                            // If the next object in the sweep portion of the main list is white, we
                            // need to remove it from the main object list and destruct it.
                            GcColor::White => {
                                if let Some(sweep_prev) = self.sweep_prev.get() {
                                    sweep_prev.header().set_next(next_box);
                                } else {
                                    // If `sweep_prev` is None, then the sweep pointer is also the
                                    // beginning of the main object list, so we need to adjust it.
                                    debug_assert_eq!(self.all.get(), Some(sweep));
                                    self.all.set(next_box);
                                }
                                self.metrics.mark_gc_deallocated(sweep_header.size_of_box());

                                // SAFETY: this object is white, and wasn't traced by a `GcWeak`
                                // during this cycle, meaning it cannot have either strong or weak
                                // pointers, so we can drop the whole object.
                                unsafe { free_gc_box(sweep) }
                            }
                            // Keep the `GcBox` as part of the linked list if we traced a weak
                            // pointer to it. The weak pointer still needs access to the `GcBox` to
                            // be able to check if the object is still alive. We can only deallocate
                            // the `GcBox`, once there are no weak pointers left.
                            GcColor::WhiteWeak => {
                                self.sweep_prev.set(Some(sweep));
                                sweep_header.set_color(GcColor::White);
                                if sweep_header.is_live() {
                                    sweep_header.set_live(false);
                                    // SAFETY: Since this object is white, that means there are no
                                    // more strong pointers to this object, only weak pointers, so
                                    // we can safely drop its contents.
                                    unsafe { sweep.drop_in_place() }
                                }
                            }
                            // If the next object in the sweep portion of the main list is black, we
                            // need to keep it but turn it back white.
                            GcColor::Black => {
                                self.sweep_prev.set(Some(sweep));
                                self.metrics.mark_gc_remembered(sweep_header.size_of_box());
                                sweep_header.set_color(GcColor::White);
                            }
                            // No gray objects should be in this part of the main list, they should
                            // be added to the beginning of the list before the sweep pointer, so it
                            // should not be possible for us to encounter them here.
                            GcColor::Gray => {
                                debug_assert!(false, "unexpected gray object in sweep list")
                            }
                        }
                    } else {
                        // Sweep list exhausted: reset for the next cycle and go back to sleep.
                        self.sweep_prev.set(None);
                        self.root_needs_trace.set(true);
                        entered.switch(Phase::Sleep);
                        self.metrics.start_cycle();
                        // Collection is done, forcibly exit the loop.
                        target_debt = f64::INFINITY;
                    }
                }
                Phase::Drop => unreachable!(),
            }

            // Negated `>` rather than `<=` so that a NaN debt also yields.
            if !(self.metrics.allocation_debt() > target_debt) {
                entered.log_progress("GC: yielding...");
                return;
            }
        }
    }
433
    /// Allocate a new `GcBox` holding `t`, link it at the head of the `all`
    /// list, and record the allocation in the metrics.
    fn allocate<T: Collect>(&self, t: T) -> NonNull<GcBoxInner<T>> {
        let header = GcBoxHeader::new::<T>();
        header.set_next(self.all.get());
        header.set_live(true);
        header.set_needs_trace(T::needs_trace());

        let alloc_size = header.size_of_box();
        self.metrics.mark_gc_allocated(alloc_size);

        // Make the generated code easier to optimize into `T` being constructed in place or at the
        // very least only memcpy'd once.
        // For more information, see: https://github.com/kyren/gc-arena/pull/14
        let (gc_box, ptr) = unsafe {
            let mut uninitialized = Box::new(mem::MaybeUninit::<GcBoxInner<T>>::uninit());
            core::ptr::write(uninitialized.as_mut_ptr(), GcBoxInner::new(header, t));
            let ptr = NonNull::new_unchecked(Box::into_raw(uninitialized) as *mut GcBoxInner<T>);
            (GcBox::erase(ptr), ptr)
        };

        self.all.set(Some(gc_box));
        if self.phase.get() == Phase::Sweep && self.sweep_prev.get().is_none() {
            // The new allocation is now the head of `all`, ahead of the
            // (unchanged) `sweep` pointer. If no surviving object has become
            // `sweep_prev` yet, make the new head `sweep_prev` so that freeing
            // white objects unlinks them relative to it instead of clobbering
            // `all` (see the `debug_assert_eq!` in the sweep loop).
            self.sweep_prev.set(self.all.get());
        }

        ptr
    }
460
    // If we are marking and `parent` is black while `child` (or, when `child`
    // is `None`, any potential child) may be white, re-gray `parent` and queue
    // it for re-tracing.
    #[inline]
    fn backward_barrier(&self, parent: GcBox, child: Option<GcBox>) {
        // During the marking phase, if we are mutating a black object, we may add a white object to
        // it and invalidate the invariant that black objects may not point to white objects. Turn
        // the black parent object gray to prevent this.
        //
        // NOTE: This also adds the pointer to the gray_again queue even if `header.needs_trace()`
        // is false, but this is not harmful (just wasteful). There's no reason to call a barrier on
        // a pointer that can't adopt other pointers, so we skip the check.
        if self.phase.get() == Phase::Mark
            && parent.header().color() == GcColor::Black
            && child
                .map(|c| matches!(c.header().color(), GcColor::White | GcColor::WhiteWeak))
                .unwrap_or(true)
        {
            // Outline the actual barrier code (which is somewhat expensive and won't be executed
            // often) to promote the inlining of the write barrier.
            #[cold]
            fn barrier(this: &Context, parent: GcBox) {
                parent.header().set_color(GcColor::Gray);
                this.gray_again.borrow_mut().push(parent);
            }
            barrier(self, parent);
        }
    }
486
487 #[inline]
488 fn forward_barrier(&self, parent: Option<GcBox>, child: GcBox) {
489 // During the marking phase, if we are mutating a black object, we may add a white object
490 // to it and invalidate the invariant that black objects may not point to white objects.
491 // Immediately trace the child white object to turn it gray (or black) to prevent this.
492 if self.phase.get() == Phase::Mark
493 && parent
494 .map(|p| p.header().color() == GcColor::Black)
495 .unwrap_or(true)
496 {
497 self.trace(child);
498 }
499 }
500
    /// Color `gc_box` for the mark phase: white objects either become gray and
    /// are queued for tracing, or become black immediately if they contain no
    /// traceable pointers. Gray and black objects are left alone.
    #[inline]
    fn trace(&self, gc_box: GcBox) {
        let header = gc_box.header();
        match header.color() {
            GcColor::Black | GcColor::Gray => {}
            GcColor::White | GcColor::WhiteWeak => {
                if header.needs_trace() {
                    // A white traceable object is not in the gray queue, becomes gray and enters
                    // the normal gray queue.
                    header.set_color(GcColor::Gray);
                    debug_assert!(header.is_live());
                    self.gray.borrow_mut().push(gc_box);
                } else {
                    // A white object that doesn't need tracing simply becomes black.
                    header.set_color(GcColor::Black);
                    self.metrics.mark_gc_traced(header.size_of_box());
                }
            }
        }
    }
521
522 #[inline]
523 fn trace_weak(&self, gc_box: GcBox) {
524 let header = gc_box.header();
525 if header.color() == GcColor::White {
526 header.set_color(GcColor::WhiteWeak);
527 }
528 }
529
    /// Determines whether or not a Gc pointer is safe to be upgraded.
    /// This is used by weak pointers to determine if it can safely upgrade to a strong pointer.
    #[inline]
    fn upgrade(&self, gc_box: GcBox) -> bool {
        let header = gc_box.header();

        // This object has already been freed, definitely not safe to upgrade.
        if !header.is_live() {
            return false;
        }

        // Consider the different possible phases of the GC:
        // * In `Phase::Sleep`, the GC is not running, so we can upgrade.
        //   If the newly-created `Gc` or `GcCell` survives the current `arena.mutate`
        //   call, then the situation is equivalent to having copied an existing `Gc`/`GcCell`,
        //   or having created a new allocation.
        //
        // * In `Phase::Mark`:
        //   If the newly-created `Gc` or `GcCell` survives the current `arena.mutate`
        //   call, then it must have been stored somewhere, triggering a write barrier.
        //   This will ensure that the new `Gc`/`GcCell` gets traced (if it's now reachable)
        //   before we transition to `Phase::Sweep`.
        //
        // * In `Phase::Sweep`:
        //   If the allocation is `WhiteWeak`, then it's impossible for it to have been freshly-
        //   created during this `Phase::Sweep`. `WhiteWeak` is only set when a white `GcWeak/
        //   GcWeakCell` is traced. A `GcWeak/GcWeakCell` must be created from an existing `Gc/
        //   GcCell` via `downgrade()`, so `WhiteWeak` means that a `GcWeak` / `GcWeakCell` existed
        //   during the last `Phase::Mark.`
        //
        //   Therefore, a `WhiteWeak` object is guaranteed to be deallocated during this
        //   `Phase::Sweep`, and we must not upgrade it.
        //
        //   Conversely, it's always safe to upgrade a white object that is not `WhiteWeak`.
        //   In order to call `upgrade`, you must have a `GcWeak/GcWeakCell`. Since it is
        //   not `WhiteWeak` there cannot have been any `GcWeak/GcWeakCell`s during the
        //   last `Phase::Mark`, so the weak pointer must have been created during this
        //   `Phase::Sweep`. This is only possible if the underlying allocation was freshly-created
        //   - if the allocation existed during `Phase::Mark` but was not traced, then it
        //   must have been unreachable, which means that the user wouldn't have been able to call
        //   `downgrade`. Therefore, we can safely upgrade, knowing that the object will not be
        //   freed during this phase, despite being white.
        if self.phase.get() == Phase::Sweep && header.color() == GcColor::WhiteWeak {
            return false;
        }
        true
    }
577
578 #[inline]
579 fn resurrect(&self, gc_box: GcBox) {
580 let header = gc_box.header();
581 debug_assert_eq!(self.phase.get(), Phase::Mark);
582 debug_assert!(header.is_live());
583 if matches!(header.color(), GcColor::White | GcColor::WhiteWeak) {
584 header.set_color(GcColor::Gray);
585 self.gray.borrow_mut().push(gc_box);
586 }
587 }
588}
589
590// SAFETY: the gc_box must never be accessed after calling this function.
591unsafe fn free_gc_box<'gc>(mut gc_box: GcBox) {
592 unsafe {
593 if gc_box.header().is_live() {
594 // If the alive flag is set, that means we haven't dropped the inner value of this object,
595 gc_box.drop_in_place();
596 }
597 gc_box.dealloc();
598 }
599}
600
/// Helper type for managing phase transitions.
///
/// While alive it keeps the context's tracing span (under the `tracing`
/// feature) entered; on drop the span is exited and stored back into
/// `Context::phase_span` so the next guard can resume it.
struct PhaseGuard<'a> {
    cx: &'a Context,
    #[cfg(feature = "tracing")]
    span: tracing::span::EnteredSpan,
}
607
impl Drop for PhaseGuard<'_> {
    fn drop(&mut self) {
        #[cfg(feature = "tracing")]
        {
            // Exit the entered span and hand it back to the context so a later
            // `PhaseGuard::enter` can resume it.
            let span = mem::replace(&mut self.span, tracing::Span::none().entered());
            self.cx.phase_span.set(span.exit());
        }
    }
}
617
impl<'a> PhaseGuard<'a> {
    /// Begin a guarded section, optionally switching the context to `phase`.
    /// With `None`, the current phase (and its stored span) is resumed.
    fn enter(cx: &'a Context, phase: Option<Phase>) -> Self {
        if let Some(phase) = phase {
            cx.phase.set(phase);
        }

        Self {
            cx,
            #[cfg(feature = "tracing")]
            span: {
                // Take the stored span; build a fresh one if the phase changed,
                // otherwise re-enter the previous phase's span.
                let mut span = cx.phase_span.replace(tracing::Span::none());
                if let Some(phase) = phase {
                    span = Self::span_for(&cx.metrics, phase);
                }
                span.entered()
            },
        }
    }

    /// Switch the context to `phase`, replacing the tracing span accordingly.
    fn switch(&mut self, phase: Phase) {
        self.cx.phase.set(phase);

        #[cfg(feature = "tracing")]
        {
            // Drop (exit) the old entered span before entering the new one.
            let _ = mem::replace(&mut self.span, tracing::Span::none().entered());
            self.span = Self::span_for(&self.cx.metrics, phase).entered();
        }
    }

    /// Emit a debug-level progress event under the current phase span.
    fn log_progress(&mut self, #[allow(unused)] message: &str) {
        // TODO: add more infos here
        #[cfg(feature = "tracing")]
        tracing::debug!(
            target: "aiscript_arena",
            parent: &self.span,
            message,
            phase = tracing::field::debug(self.cx.phase.get()),
            allocated = self.cx.metrics.total_allocation(),
        );
    }

    /// Build the tracing span used for `phase` in this arena.
    #[cfg(feature = "tracing")]
    fn span_for(metrics: &Metrics, phase: Phase) -> tracing::Span {
        tracing::debug_span!(
            target: "aiscript_arena",
            "aiscript_arena",
            id = metrics.arena_id(),
            ?phase,
        )
    }
}
668}