backdrop/lib.rs
1#![no_std]
2#![cfg_attr(feature = "doc", feature(doc_auto_cfg))]
3
4//! The `backdrop` crate allows you to customize when and how your values are dropped.
5//! The main entry point of this crate is the [`Backdrop<T, Strategy>`] wrapper type.
6//! This will wrap any 'normal' type `T` with a zero-cost wrapper
7//! that customizes how it is dropped based on the given `Strategy`,
8//! which is a marker (zero-size compile-time only) type that implements the
9//! [`BackdropStrategy<T>`] trait.
10//!
11//! # Which strategy is best?
12//! This probably depends very much on your application! Be sure to benchmark!
13//! For general applications, the following are good starting recommendations:
14//! - [`TrashThreadStrategy`] if you are working on a normal application where multithreading is possible.
15//! - [`TokioTaskStrategy`] or [`TokioBlockingTaskStrategy`] if you are building an `async` application on top of the [`::tokio`] crate.
16//! - [`TrashQueueStrategy`] if you are writing a single-threaded application.
17//!
18//! Backdrop also ships with a bunch of 'simple testing' strategies ([`LeakStrategy`], [`TrivialStrategy`], [`DebugStrategy`], [`ThreadStrategy`]),
//! that can help to understand how `backdrop` works, as a learning tool to build your own strategies, and as a benchmarking baseline.
20//!
21//! # Limitations
22//! `Backdrop<T, S>` implements the [`Deref`] and [`DerefMut`] traits, enabling you to use most methods available on `T` also on a `Backdrop<T>`.
23//! On top of this, a bunch of common traits have been implemented for `Backdrop<T, S>` whenever they are implemented for `T`.
24//!
25//! # Features
26//! ## Basic features
27//! - You can disable the `std` feature (enabled by default) to use this crate in no-std contexts.
28//! Without `std`, none of the [`thread`]-based strategies are available.
29//! The [`DebugStrategy`] is also disabled as it depends on `println`.
30//! - As long as the `alloc` feature is not disabled (enabled by default; part of the `std` feature)
31//! the single-threaded [`TrashQueueStrategy`] is still available to you.
//! If you also do not have access to `alloc`, you'll probably want to create your own strategy for your particular no-std situation.
33//! - You can enable the optional `tokio` feature to get access to strategies that drop on a background _tokio task_. (C.f. the [`tokio`] module)
34//!
35//! ## Using Backdrop with traits from other crates
36//! - `rkyv` enables `Archive`/`Serialize`/`Deserialize` support for `Backdrop<T, S>` iff they are implemented for `T`.
37//! - `bytecheck` enables `CheckBytes` support for `Backdrop<T, S>` iff implemented for `T`.
38//!
39//! Need support for something else? Please open a PR and we can add it as an optional feature.
40
41
42#[cfg(feature = "std")]
43extern crate std;
44
45#[cfg(feature = "std")]
46pub mod thread;
47#[cfg(feature = "std")]
48#[doc(inline)]
49pub use thread::{ThreadBackdrop, TrashThreadBackdrop, ThreadStrategy, TrashThreadStrategy};
50
51#[cfg(feature = "tokio")]
52pub mod tokio;
53
54#[cfg(feature = "tokio")]
55#[doc(inline)]
56pub use crate::tokio::{TokioTaskBackdrop, TokioBlockingTaskBackdrop, TokioTaskStrategy, TokioBlockingTaskStrategy};
57
58
59use core::marker::PhantomData;
60use core::ops::{Deref, DerefMut};
61use core::mem::ManuallyDrop;
62
/// The strategy to use to drop `T`.
///
/// Most implementations of this trait place additional requirements on `T`.
/// For instance, all strategies that move T to a separate thread to be dropped there
/// introduce a `T: Send + 'static` bound.
///
pub trait BackdropStrategy<T> {
    /// Called whenever `T` should be dropped.
    ///
    /// The trivial implementation (and indeed, [`TrivialStrategy`] is implemented this way)
    /// is to do nothing. Then `T` will just directly be dropped right here, right now, because it is passed by value:
    ///
    /// ```ignore
    /// pub struct TrivialStrategy();
    ///
    /// impl<T> BackdropStrategy<T> for TrivialStrategy {
    ///     fn execute(_droppable: T) {
    ///     }
    /// }
    /// ```
    /// Or, for clarity:
    /// ```ignore
    /// pub struct TrivialStrategy();
    ///
    /// impl<T> BackdropStrategy<T> for TrivialStrategy {
    ///     fn execute(droppable: T) {
    ///         core::mem::drop(droppable)
    ///     }
    /// }
    /// ```
    ///
    /// But obviously that is not very exciting/helpful.
    /// Most implementations move the `T` to somewhere else somehow, and then it will be dropped there.
    ///
    /// To give you another example, here is how [`ThreadStrategy`] works:
    /// ```ignore
    /// pub struct ThreadStrategy();
    ///
    /// impl<T: Send + 'static> BackdropStrategy<T> for ThreadStrategy {
    ///     fn execute(droppable: T) {
    ///         std::thread::spawn(|| {
    ///             core::mem::drop(droppable);
    ///         });
    ///     }
    /// }
    /// ```
    fn execute(droppable: T);
}
111
112/// Wrapper to drop any value at a later time, such as in a background thread.
113///
114/// `Backdrop<T, Strategy>` is guaranteed to have the same in-memory representation as `T`.
115/// As such, wrapping (and unwrapping) a `T` into a `Backdrop<T, S>` has zero memory overhead.
116///
117/// Besides altering how `T` is dropped, a `Backdrop<T, S>` behaves as much as possible as a `T`.
118/// This is done by implementing [`Deref`] and [`DerefMut`]
119/// so most methods available for `T` are also immediately available for `Backdrop<T>`.
120/// `Backdrop<T, S>` also implements many common traits whenever `T` implements these.
121///
122/// # Customizing the strategy
123///
124/// You customize what strategy is used by picking your desired `S` parameter,
125/// which can be any type that implements the [`BackdropStrategy`] trait.
126/// This crate comes with many common strategies, but you can also implement your own.
127///
128/// # Restrictions
129///
130/// `Backdrop<T, Strategy>` does not restrict `T` (besides `T` needing to be [`Sized`]). However,
/// many [`Strategy`](`BackdropStrategy<T>`) types only implement [`BackdropStrategy<T>`] when `T` fits certain restrictions.
132/// For instance, the [`TrashThreadStrategy`] requires `T` to be `Send` since `T` will be moved to another thread to be cleaned up there.
133///
134/// What about [unsized/dynamically-sized](https://doc.rust-lang.org/nomicon/exotic-sizes.html) types? The current implementation of `Backdrop` restricts `T` to be [`Sized`] mostly for ease of implementation.
135/// It is our expectation that your unsized datastructures probably are already nested in a [`std::boxed::Box<T>`] or other smart pointer,
136/// which you can wrap with `Backdrop` as a whole.
137/// _(Side note: Zero-sized types can be wrapped by `Backdrop` without problems.)_
138///
139/// There is one final important restriction:
140/// ### The problem with Arc
141/// A `Backdrop<Arc<T>, S>` will not behave as you might expect:
142/// It will cause the backdrop strategy to run whenever the reference count is decremented.
143/// But what you probably want, is to run the backdrop strategy exactly when the last [`Arc<T>`][arc] is dropped
144/// (AKA when the reference count drops to 0) and the _contents_ of the [`Arc`][arc] go out of scope.
145///
146/// A `Arc<Backdrop<Box<T>, S>>` _will_ work as you expect, but you incur an extra pointer indirection (arc -> box -> T)
147/// every time you read its internal value.
148///
149/// Instead, use the [`backdrop_arc`](https://crates.io/crates/backdrop_arc) crate, which contains
150/// a specialized `Arc` datatype that does exactly what you want without a needless indirection.
151///
152/// [arc]: std::sync::Arc
#[repr(transparent)]
pub struct Backdrop<T, S: BackdropStrategy<T>> {
    // Wrapped in `ManuallyDrop` so the value's drop glue does NOT run
    // automatically; the custom `Drop` impl below decides what happens instead.
    val: ManuallyDrop<T>,
    // Zero-sized marker tying the chosen strategy type `S` to this value;
    // contributes nothing to the memory layout (hence `repr(transparent)` holds).
    _marker: PhantomData<S>,
}
158
159impl<T, Strategy: BackdropStrategy<T>> Backdrop<T, Strategy> {
160 /// Construct a new [`Backdrop<T, S>`] from any T. This is a zero-cost operation.
161 ///
162 /// From now on, T will no longer be dropped normally,
163 /// but instead it will be dropped using the implementation of the given [`BackdropStrategy`].
164 ///
165 /// ```
166 /// use backdrop::*;
167 ///
168 /// // Either specify the return type:
169 /// let mynum: Backdrop<usize, LeakStrategy> = Backdrop::new(42);
170 ///
171 /// // Or use the 'Turbofish' syntax on the function call:
172 /// let mynum2 = Backdrop::<_, LeakStrategy>::new(42);
173 ///
174 /// // Or use one of the shorthand type aliases:
175 /// let mynum3 = LeakBackdrop::new(42);
176 ///
177 /// assert_eq!(mynum, mynum2);
178 /// assert_eq!(mynum2, mynum3);
179 /// // <- Because we are using the LeakStrategy, we leak memory here. Fun! :-)
180 /// ```
181 /// This function is the inverse of [`Backdrop::into_inner`].
182 ///
183 #[inline]
184 pub fn new(val: T) -> Self {
185 Self {
186 val: ManuallyDrop::new(val),
187 _marker: PhantomData,
188 }
189 }
190
191 /// Turns a [`Backdrop<T, S>`] back into a normal T.
192 /// This undoes the effect of Backdrop.
193 /// The resulting T will be dropped again using normal rules.
194 /// This function is the inverse of [`Backdrop::new`].
195 ///
196 /// This is a zero-cost operation.
197 ///
198 /// This is an associated function, so call it using fully-qualified syntax.
199 #[inline]
200 pub fn into_inner(mut this: Self) -> T {
201 // SAFETY: we forget the container after `this.val` is taken out.
202 unsafe {
203 let inner = ManuallyDrop::take(&mut this.val);
204 core::mem::forget(this);
205 inner
206 }
207 }
208
209 /// Changes the strategy used for a Backdrop.
210 ///
211 /// This is a zero-cost operation
212 ///
213 /// This is an associated function, so call it using fully-qualified syntax.
214 ///
215 /// ```
216 /// use backdrop::*;
217 ///
218 /// let foo = LeakBackdrop::new(42);
219 /// let foo = Backdrop::change_strategy::<TrivialStrategy>(foo);
220 /// // Now `foo` will be dropped according to TrivialStrategy (which does the normal drop rules)
221 /// // rather than LeakStrategy (which does not cleanup by leaking memory)
222 /// ```
223 pub fn change_strategy<S2: BackdropStrategy<T>>(this: Self) -> Backdrop<T, S2> {
224 Backdrop::<T, S2>::new(Backdrop::into_inner(this))
225 }
226}
227
228/// This is where the magic happens: Instead of dropping `T` normally, we run [`Strategy::execute`](BackdropStrategy::execute) on it.
229impl<T, Strategy: BackdropStrategy<T>> Drop for Backdrop<T, Strategy> {
230 #[inline]
231 fn drop(&mut self) {
232 // SAFETY: self.val is not used again after this call
233 // and since self is already being dropped, no further cleanup is necessary
234 let inner = unsafe { ManuallyDrop::take(&mut self.val)};
235 Strategy::execute(inner)
236 }
237}
238
239impl<T, S: BackdropStrategy<T>> core::ops::Deref for Backdrop<T, S> {
240 type Target = T;
241 #[inline]
242 fn deref(&self) -> &T {
243 // SAFETY: self.1 is filled with an initialized value on construction
244 self.val.deref()
245 }
246}
247
248impl<T, S: BackdropStrategy<T>> DerefMut for Backdrop<T, S> {
249 #[inline]
250 fn deref_mut(&mut self) -> &mut T {
251 // SAFETY: self.1 is filled with an initialized value on construction
252 self.val.deref_mut()
253 }
254}
255
256impl<T: core::fmt::Debug, S> core::fmt::Debug for Backdrop<T, S>
257 where
258 S: BackdropStrategy<T>,
259{
260 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
261 core::fmt::Debug::fmt(&**self, f)
262 }
263}
264
265impl<T: core::fmt::Display, S> core::fmt::Display for Backdrop<T, S>
266where
267 S: BackdropStrategy<T>,
268{
269 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
270 core::fmt::Display::fmt(&**self, f)
271 }
272}
273
274impl<T: Clone, S> Clone for Backdrop<T, S>
275where
276 S: BackdropStrategy<T>,
277{
278 fn clone(&self) -> Self {
279 Self::new(self.deref().clone())
280 }
281}
282
283impl<T: core::cmp::PartialEq, S> core::cmp::PartialEq for Backdrop<T, S>
284where
285 S: BackdropStrategy<T>,
286{
287 fn eq(&self, other: &Self) -> bool {
288 self.deref().eq(other.deref())
289 }
290}
291
292impl<T: core::cmp::Eq, S> core::cmp::Eq for Backdrop<T, S>
293where
294 S: BackdropStrategy<T>,
295{ }
296
297impl<T: core::cmp::PartialOrd, S> core::cmp::PartialOrd for Backdrop<T, S>
298where
299 S: BackdropStrategy<T>,
300{
301 fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
302 self.deref().partial_cmp(other.deref())
303 }
304}
305
306impl<T: core::cmp::Ord, S> core::cmp::Ord for Backdrop<T, S>
307where
308 S: BackdropStrategy<T>,
309{
310 fn cmp(&self, other: &Self) -> core::cmp::Ordering {
311 self.deref().cmp(other.deref())
312 }
313}
314
315impl<T: core::hash::Hash, S> core::hash::Hash for Backdrop<T, S>
316where
317 S: BackdropStrategy<T>,
318{
319 fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
320 self.deref().hash(state)
321 }
322}
323
324/// Converting between a T and a Backdrop<T, S> is a zero-cost operation
325///
326/// c.f. [`Backdrop::new`]
327impl<T, S> From<T> for Backdrop<T, S>
328 where
329 S: BackdropStrategy<T>,
330{
331 fn from(val: T) -> Self {
332 Backdrop::new(val)
333 }
334}
335
336/// Strategy which drops the contained value normally.
337///
338/// It behaves exactly as if the backdrop was not there.
339///
340/// Its main purpose is to be able to easily test the advantage of another strategy
341/// in a benchmark, without having to completely alter the structure of your code.
342pub struct TrivialStrategy();
343
344impl<T> BackdropStrategy<T> for TrivialStrategy {
345 #[inline]
346 fn execute(droppable: T) {
347 core::mem::drop(droppable)
348 }
349}
350
351pub type TrivialBackdrop<T> = Backdrop<T, TrivialStrategy>;
352
353
354
355/// Strategy which will leak the contained value rather than dropping it.
356///
357/// This is not normally useful, except for testing what the overhead is
358/// of whatever code is surrounding your drop glue.
359pub struct LeakStrategy();
360
361impl<T> BackdropStrategy<T> for LeakStrategy {
362 #[inline]
363 fn execute(droppable: T) {
364 core::mem::forget(droppable)
365 }
366}
367
368pub type LeakBackdrop<T> = Backdrop<T, LeakStrategy>;
369
370
/// 'Wrapper' strategy that prints out T when executed.
///
/// Takes another strategy as generic type argument.
///
/// The exact printed message is not considered a stable API;
/// it is intended for human programmer eyes only.
#[cfg(feature = "std")]
pub struct DebugStrategy<InnerStrategy>(PhantomData<InnerStrategy>);

#[cfg(feature = "std")]
impl<T, InnerStrategy> BackdropStrategy<T> for DebugStrategy<InnerStrategy>
where
    T: std::fmt::Debug,
    InnerStrategy: BackdropStrategy<T>,
{
    #[inline]
    fn execute(droppable: T) {
        use std::println;
        // Announce which inner strategy is about to run, then delegate to it.
        let strategy_name = std::any::type_name::<InnerStrategy>();
        println!("Using BackdropStrategy '{}' to drop value {:?}", strategy_name, &droppable);
        InnerStrategy::execute(droppable)
    }
}
393
394#[cfg(feature = "alloc")]
395extern crate alloc;
396#[cfg(feature = "alloc")]
397use alloc::{boxed::Box, collections::VecDeque};
398
399#[cfg(feature = "alloc")]
400use core::cell::RefCell;
401#[cfg(feature = "alloc")]
402use core::any::Any;
403
// One trash queue per thread, holding type-erased (`Box<dyn Any>`) garbage
// until one of `TrashQueueStrategy`'s cleanup functions drops it.
#[cfg(feature = "std")]
std::thread_local!{
    static TRASH_QUEUE: RefCell<VecDeque<Box<dyn Any>>> = VecDeque::new().into();
}
408
// Fallback implementation for when std::thread_local! is not available
// NOTE: a mutable static — only sound while the program is genuinely
// single-threaded (see `with_single_threaded_trash_queue` below).
#[cfg(all(not(feature = "std"), feature = "alloc"))]
static mut TRASH_QUEUE: Option<core::cell::RefCell<VecDeque<Box<dyn core::any::Any>>>> = None;
412
// When std::thread_local! exists we can safely call the closure
#[cfg(feature = "std")]
fn with_single_threaded_trash_queue(closure: impl FnOnce(&RefCell<VecDeque<Box<dyn Any>>>)) {
    // `LocalKey::with` hands the closure a shared reference to this
    // thread's queue; the closure's signature matches exactly.
    TRASH_QUEUE.with(closure);
}
420
// In no_std (but alloc) contexts, we expect the program to run single-threaded
// And we call the closure using unsafe in a best-effort basis.
#[cfg(all(not(feature = "std"), feature = "alloc"))]
fn with_single_threaded_trash_queue(closure: impl FnOnce(&RefCell<VecDeque<Box<dyn Any>>>)) {
    // SAFETY: only sound if nothing else accesses TRASH_QUEUE concurrently
    // (the single-threaded assumption documented above).
    // `addr_of_mut!` goes through a raw pointer so we never materialize
    // overlapping references to the mutable static; the previous version
    // created `&mut TRASH_QUEUE` and then a second `&TRASH_QUEUE` reference
    // (and passed a needless `&&RefCell`), which trips `static_mut_refs`
    // and risks aliasing UB.
    let queue = unsafe {
        (*core::ptr::addr_of_mut!(TRASH_QUEUE)).get_or_insert_with(|| VecDeque::new().into())
    };
    closure(queue);
}
431
/// Strategy which adds garbage to a global 'trash [`VecDeque`]'.
///
/// In `std` contexts, this trash queue is protected using [`std::thread_local!`].
/// In `no_std` contexts, it is instead implemented as a [mutable static](https://doc.rust-lang.org/reference/items/static-items.html#mutable-statics) variable.
///
/// Perfectly fine for truly single-threaded applications.
///
/// This does mean that if you do use some sort of 'alternative threading' in a `no_std` context, this strategy will be unsound!
#[cfg(feature = "alloc")]
pub struct TrashQueueStrategy();
442
#[cfg(feature = "alloc")]
impl TrashQueueStrategy {
    /// Makes sure the global (thread local) queue is initialized
    /// If you do not call this, it will be initialized the first time an object is dropped,
    /// which will add some overhead at that moment.
    ///
    /// Called automatically by [`TrashQueueStrategy::cleanup_on_exit()`]
    pub fn ensure_initialized() {
        // Merely touching the queue triggers its lazy initialization.
        with_single_threaded_trash_queue(|_tq_cell| {});
    }

    /// Cleans up a single item in the trash queue.
    ///
    /// Returns `true` if there is more garbage in the queue at this moment.
    /// That could be used to e.g. clean up 'some' garbage but not all.
    pub fn cleanup_one() -> bool {
        let mut more_garbage = false;
        with_single_threaded_trash_queue(|tq_cell| {
            let mut tq = tq_cell.borrow_mut();
            // Dropping the popped `Box<dyn Any>` runs the stored value's drop glue.
            core::mem::drop(tq.pop_front());
            // BUGFIX: this previously stored `tq.is_empty()`, the exact inverse
            // of the documented return value ('true if more garbage remains').
            more_garbage = !tq.is_empty();
        });
        more_garbage
    }

    /// Cleans up everything that is in the trash queue.
    pub fn cleanup_all() {
        with_single_threaded_trash_queue(|tq_cell| {
            let mut tq = tq_cell.borrow_mut();
            // Drain front-to-back, dropping each type-erased item.
            while let Some(item) = tq.pop_front() {
                core::mem::drop(item);
            }
        });
    }

    /// Wrapper which will:
    /// - Call [`TrashQueueStrategy::ensure_initialized()`] before your closure
    /// - Call your closure
    /// - Call [`TrashQueueStrategy::cleanup_all()`] after your closure.
    ///
    /// As such, you can use this to delay dropping until after your critical code section very easily:
    pub fn cleanup_on_exit<R>(closure: impl FnOnce() -> R) -> R {
        TrashQueueStrategy::ensure_initialized();
        let outcome = closure();
        TrashQueueStrategy::cleanup_all();
        outcome
    }
}
492
#[cfg(feature = "alloc")]
impl<T: 'static> BackdropStrategy<T> for TrashQueueStrategy {
    // Type-erases the value and parks it in the trash queue; its drop glue
    // only runs once one of the `cleanup_*` functions is called.
    fn execute(droppable: T) {
        let boxed: Box<dyn core::any::Any> = Box::new(droppable);
        with_single_threaded_trash_queue(move |tq_cell| {
            tq_cell.borrow_mut().push_back(boxed);
        });
    }
}
503
// Archiving delegates entirely to `T`; the zero-sized strategy marker is
// not part of the archived representation.
#[cfg(feature = "rkyv")]
impl<T: rkyv::Archive, S> rkyv::Archive for Backdrop<T, S>
where
    S: BackdropStrategy<T>,
{
    type Archived = rkyv::Archived<T>;
    type Resolver = rkyv::Resolver<T>;
    // Safety contract is inherited from `rkyv::Archive::resolve`:
    // the caller must provide a valid `pos`/`out` for this value.
    unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
        self.deref().resolve(pos, resolver, out);
    }
}
515
// Serialization forwards to the inner `T` (via `Deref`); the strategy
// marker carries no data and is not written out.
#[cfg(feature = "rkyv")]
impl<Ser, T: rkyv::Archive + rkyv::Serialize<Ser>, S> rkyv::Serialize<Ser> for Backdrop<T, S>
where
    Ser: rkyv::Fallible,
    S: BackdropStrategy<T>,
{
    fn serialize(&self, serializer: &mut Ser) -> Result<Self::Resolver, <Ser as rkyv::Fallible>::Error> {
        self.deref().serialize(serializer)
    }
}
526
527
// Deserializing an archived `T` directly into a `Backdrop<T, S>`:
// first reconstruct the plain `T`, then wrap it (zero-cost, see `Backdrop::new`).
#[cfg(feature = "rkyv")]
impl<Des, T, S> rkyv::Deserialize<Backdrop<T, S>, Des> for rkyv::Archived<T>
where
    T: rkyv::Archive,
    rkyv::Archived<T>: rkyv::Deserialize<T, Des>,
    Des: rkyv::Fallible,
    S: BackdropStrategy<T>,
{
    fn deserialize(&self, deserializer: &mut Des) -> Result<Backdrop<T, S>, <Des as rkyv::Fallible>::Error> {
        let inner: T = self.deserialize(deserializer)?;
        Ok(Backdrop::new(inner))
    }
}
541
// Byte validation delegates to `T`: sound because `Backdrop` is declared
// `#[repr(transparent)]` over `ManuallyDrop<T>`, which shares `T`'s layout.
#[cfg(feature = "bytecheck")]
impl<C: ?Sized, T: bytecheck::CheckBytes<C>, S> bytecheck::CheckBytes<C> for Backdrop<T, S>
where
    S: BackdropStrategy<T>,
{
    type Error = <T as bytecheck::CheckBytes<C>>::Error;
    unsafe fn check_bytes<'a>(value: *const Self, context: &mut C) -> Result<&'a Self, Self::Error>{
        // SAFETY: Backdrop is repr(transparent) so this pointer cast is OK
        let inner_ref = bytecheck::CheckBytes::check_bytes(value as *const T, context)?;
        // SAFETY: Backdrop is repr(transparent) so this transmute is OK
        // (it only reinterprets `&T` as `&Backdrop<T, S>`, same lifetime 'a).
        Ok(core::mem::transmute(inner_ref))
    }
}