recycle_box/lib.rs

//! A pointer type for heap-allocated objects whose heap storage can be
//! re-used.
//!
//! The box can be consumed to drop the current object and re-use the allocated
//! space to store another object, whose type may be different. New memory will
//! only be allocated if the new object does not fit within the currently
//! allocated space.
//!
//! Coercion from `Sized` to `!Sized` boxed objects is supported, including on
//! Rust stable.
//!
//! Last but not least: `Pin`ned boxes can be recycled too, which is useful when
//! repeatedly allocating `Future`s.
//!
//! # Examples
//!
//! Store different objects, re-using the previously allocated storage when
//! possible:
//!
//! ```
//! use recycle_box::RecycleBox;
//!
//! // Store an object.
//! let box1 = RecycleBox::new(123u64);
//!
//! // Store a smaller object.
//! let box2 = RecycleBox::recycle(box1, 456u16); // Does not allocate
//!
//! // Store a larger object.
//! let box3 = RecycleBox::recycle(box2, [123u32; 8]); // New memory is allocated
//!
//! // Move out and replace the previous object.
//! let (array3, box4) = RecycleBox::replace(box3, 789u32); // Does not allocate
//!
//! // Drop the current object but preserve the allocated memory for further re-use.
//! // Note that `vacate()` is just an explicit shorthand for `recycle(())`.
//! let box5 = RecycleBox::vacate(box4);
//! ```
//!
//! Re-use the same box for different objects sharing the same trait:
//!
//! ```
//! use std::future::{self, Future};
//! use recycle_box::{RecycleBox, coerce_box};
//!
//! let mut my_box: RecycleBox<dyn Future<Output = i32>> =
//!     coerce_box!(RecycleBox::new(future::ready(42)));
//! my_box = coerce_box!(RecycleBox::new(future::pending()));
//! ```
//!
//! Recycle a pinned box:
//!
//! ```
//! use std::pin::Pin;
//! use recycle_box::RecycleBox;
//!
//! let pinned_box: Pin<_> = RecycleBox::new(42).into();
//! let new_box = RecycleBox::recycle_pinned(pinned_box, "Forty two");
//! ```

#![warn(missing_docs, missing_debug_implementations, unreachable_pub)]

use std::alloc::{self, Layout};
use std::cmp::max;
use std::fmt;
use std::future::Future;
use std::mem::ManuallyDrop;
use std::ops::{Deref, DerefMut};
use std::pin::Pin;
use std::ptr::{self, NonNull};
use std::task::{Context, Poll};

/// A pointer type for heap-allocated objects whose heap storage can be
/// re-used.
///
/// See the [module-level documentation](crate) for more.
pub struct RecycleBox<T>
where
    T: ?Sized,
{
    ptr: NonNull<T>, // NonNull ensures covariance with respect to T.
    base_ptr: NonNull<u8>,
    layout: Layout,
}

impl<T> RecycleBox<T> {
    /// Allocates heap memory based on the size of `T` and places `x` into it.
    pub fn new(x: T) -> Self {
        unsafe { Self::with_layout_unchecked(x, Layout::new::<T>()) }
    }

    /// Allocates heap memory based on the specified layout and places `x` into
    /// it.
    ///
    /// If type `T` does not fit within the specified layout, the layout
    /// alignment and size are increased as needed.
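    ///
    /// # Examples
    ///
    /// An illustrative use: reserving a larger block up front so that a
    /// later, larger object can re-use it without a new allocation.
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use recycle_box::RecycleBox;
    ///
    /// // Reserve a 64-byte, 8-byte-aligned block for a 1-byte value.
    /// let small = RecycleBox::with_layout(1u8, Layout::from_size_align(64, 8).unwrap());
    ///
    /// // A 64-byte array fits in the reserved block, so this recycles in place.
    /// let large = RecycleBox::recycle(small, [0u64; 8]);
    /// assert_eq!(*large, [0u64; 8]);
    /// ```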
    pub fn with_layout(x: T, layout: Layout) -> Self {
        let x_layout = Layout::new::<T>();
        let x_size_margin = if x_layout.align() > layout.align() {
            x_layout.align() - layout.align()
        } else {
            0
        };
        assert!(x_layout.size() <= (usize::MAX - x_size_margin));

        let size = max(x_layout.size() + x_size_margin, layout.size());
        let safe_layout = Layout::from_size_align(size, layout.align()).unwrap();

        // Actually build the box.
        // A panic will be triggered on OOM.
        unsafe { Self::with_layout_unchecked(x, safe_layout) }
    }

    /// Allocates heap memory based on the specified layout and places `x` into
    /// it without verifying that the layout can fit type `T`.
    unsafe fn with_layout_unchecked(x: T, layout: Layout) -> Self {
        let base_ptr = if layout.size() == 0 {
            // Do not perform allocation for zero-sized layouts. The dangling
            // pointer is aligned to the layout's alignment so that
            // `compute_ptr` also succeeds for zero-sized types with
            // non-trivial alignment.
            NonNull::new(layout.align() as *mut u8).unwrap()
        } else {
            // Allocate memory.
            // A panic will be triggered on OOM.
            NonNull::new(alloc::alloc(layout)).unwrap()
        };
        let ptr = compute_ptr(base_ptr.as_ptr(), layout).unwrap(); // Never panics unless the layout is incompatible with `T`.
        ptr::write(ptr, x);

        Self {
            ptr: NonNull::new_unchecked(ptr),
            base_ptr,
            layout,
        }
    }

    /// Consumes the box and returns both the old value and a new box, re-using
    /// the already allocated memory if possible.
    ///
    /// No memory is allocated unless the new object does not fit within the
    /// already allocated memory block.
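    ///
    /// # Examples
    ///
    /// A minimal illustration, moving the old value out while storing a new
    /// one in the same memory block:
    ///
    /// ```
    /// use recycle_box::RecycleBox;
    ///
    /// let b = RecycleBox::new(42u64);
    /// let (old, b) = RecycleBox::replace(b, 7u8); // Does not allocate
    /// assert_eq!(old, 42);
    /// assert_eq!(*b, 7);
    /// ```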
    pub fn replace<U>(boxed: Self, x: U) -> (T, RecycleBox<U>) {
        let boxed = ManuallyDrop::new(boxed);
        let old: T = unsafe { ptr::read(boxed.ptr.as_ptr()) };

        if let Some(ptr) = compute_ptr(boxed.base_ptr.as_ptr(), boxed.layout) {
            unsafe {
                ptr::write(ptr, x);

                (
                    old,
                    RecycleBox {
                        base_ptr: boxed.base_ptr,
                        ptr: NonNull::new_unchecked(ptr),
                        layout: boxed.layout,
                    },
                )
            }
        } else {
            unsafe {
                if boxed.layout.size() != 0 {
                    alloc::dealloc(boxed.base_ptr.as_ptr(), boxed.layout);
                }

                (old, RecycleBox::new(x))
            }
        }
    }

    /// Consumes the box and returns both the old value and a box containing an
    /// empty tuple.
    ///
    /// This is functionally equivalent to calling [`RecycleBox::replace`] with
    /// an empty tuple as argument, but is more explicit when the intent is
    /// specifically to take the contained object while preserving the allocated
    /// memory for further re-use. It may also be slightly more efficient.
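    ///
    /// # Examples
    ///
    /// For instance, moving a value out while keeping the allocation around
    /// for later recycling:
    ///
    /// ```
    /// use recycle_box::RecycleBox;
    ///
    /// let b = RecycleBox::new(String::from("hello"));
    /// let (s, empty) = RecycleBox::take(b);
    /// assert_eq!(s, "hello");
    ///
    /// // The vacated storage can later host another object.
    /// let b = RecycleBox::recycle(empty, 123u32); // Does not allocate
    /// assert_eq!(*b, 123);
    /// ```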
    pub fn take(boxed: Self) -> (T, RecycleBox<()>) {
        let boxed = ManuallyDrop::new(boxed);
        let old: T = unsafe { ptr::read(boxed.ptr.as_ptr()) };

        (
            old,
            RecycleBox {
                base_ptr: boxed.base_ptr,
                ptr: NonNull::dangling(),
                layout: boxed.layout,
            },
        )
    }

    /// Constructs a new `Pin<RecycleBox<T>>`. If `T` does not implement
    /// `Unpin`, then `x` will be pinned in memory and unable to be moved.
    pub fn pin(x: T) -> Pin<RecycleBox<T>> {
        RecycleBox::into_pin(RecycleBox::new(x))
    }
}

impl<T> RecycleBox<T>
where
    T: ?Sized,
{
    /// Consumes the box and creates another one, re-using the already
    /// allocated memory if possible.
    ///
    /// The current boxed object is dropped. No memory is allocated unless the
    /// new object does not fit within the already allocated memory block.
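    ///
    /// # Examples
    ///
    /// A simple illustration, dropping the current object and storing a new
    /// one in the same memory block:
    ///
    /// ```
    /// use recycle_box::RecycleBox;
    ///
    /// let b = RecycleBox::new([0u8; 32]);
    /// let b = RecycleBox::recycle(b, 1234u32); // Does not allocate
    /// assert_eq!(*b, 1234);
    /// ```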
    pub fn recycle<U>(boxed: Self, x: U) -> RecycleBox<U> {
        if let Some(ptr) = compute_ptr(boxed.base_ptr.as_ptr(), boxed.layout) {
            unsafe {
                let boxed = ManuallyDrop::new(boxed);
                ptr::drop_in_place(boxed.ptr.as_ptr());
                ptr::write(ptr, x);
                RecycleBox {
                    base_ptr: boxed.base_ptr,
                    ptr: NonNull::new_unchecked(ptr),
                    layout: boxed.layout,
                }
            }
        } else {
            drop(boxed);
            RecycleBox::new(x)
        }
    }

    /// Consumes the box and creates another one containing an empty tuple.
    ///
    /// This is functionally equivalent to calling [`RecycleBox::recycle`] with
    /// an empty tuple as argument, but is more explicit when the intent is
    /// specifically to drop the contained object while preserving the allocated
    /// memory for further re-use. It may also be slightly more efficient.
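    ///
    /// # Examples
    ///
    /// For instance, dropping a value early while keeping its storage for
    /// later re-use:
    ///
    /// ```
    /// use recycle_box::RecycleBox;
    ///
    /// let b = RecycleBox::new(vec![1, 2, 3]);
    /// let empty = RecycleBox::vacate(b); // The `Vec` is dropped here.
    /// let b = RecycleBox::recycle(empty, 42u64); // Does not allocate
    /// assert_eq!(*b, 42);
    /// ```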
    pub fn vacate(boxed: Self) -> RecycleBox<()> {
        unsafe {
            let boxed = ManuallyDrop::new(boxed);
            ptr::drop_in_place(boxed.ptr.as_ptr());
            RecycleBox {
                base_ptr: boxed.base_ptr,
                ptr: NonNull::dangling(),
                layout: boxed.layout,
            }
        }
    }

    /// Consumes a pinned box and creates another box, re-using the already
    /// allocated memory if possible.
    ///
    /// This is the same as [`RecycleBox::recycle`] but for a pinned `RecycleBox`.
    /// The `Pin` contract is upheld since the current object is dropped before
    /// it is replaced by the new object.
    ///
    /// # Example
    ///
    /// ```
    /// use std::pin::Pin;
    /// use recycle_box::RecycleBox;
    ///
    /// let pinned_box: Pin<_> = RecycleBox::new(42).into();
    /// let new_box = RecycleBox::recycle_pinned(pinned_box, "Forty two");
    /// ```
    pub fn recycle_pinned<U>(boxed: Pin<RecycleBox<T>>, x: U) -> RecycleBox<U> {
        unsafe { Self::recycle(Pin::into_inner_unchecked(boxed), x) }
    }

    /// Consumes a pinned box and creates another box containing an empty tuple.
    ///
    /// This is the same as [`RecycleBox::vacate`] but for a pinned `RecycleBox`.
    /// The `Pin` contract is upheld since the current object is dropped before
    /// it is replaced by an empty tuple.
    ///
    /// # Example
    ///
    /// ```
    /// use std::pin::Pin;
    /// use recycle_box::RecycleBox;
    ///
    /// let pinned_box: Pin<_> = RecycleBox::new(42).into();
    /// let empty_box = RecycleBox::vacate_pinned(pinned_box);
    /// ```
    pub fn vacate_pinned(boxed: Pin<RecycleBox<T>>) -> RecycleBox<()> {
        unsafe { Self::vacate(Pin::into_inner_unchecked(boxed)) }
    }

    /// Converts a `RecycleBox<T>` into a `Pin<RecycleBox<T>>`. If `T` does not
    /// implement `Unpin`, then `*boxed` will be pinned in memory and unable to
    /// be moved.
    ///
    /// This conversion does not allocate on the heap and happens in place.
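    ///
    /// # Examples
    ///
    /// A trivial illustration:
    ///
    /// ```
    /// use recycle_box::RecycleBox;
    ///
    /// let pinned = RecycleBox::into_pin(RecycleBox::new(42));
    /// assert_eq!(*pinned, 42);
    /// ```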
    pub fn into_pin(boxed: Self) -> Pin<Self> {
        // It is not possible to move or replace the insides of a
        // `Pin<RecycleBox<T>>` when `T: !Unpin`, so it's safe to pin it
        // directly without any additional requirements.
        unsafe { Pin::new_unchecked(boxed) }
    }

    /// Constructs a box from raw pointers and a layout.
    ///
    /// The `T` object pointed to and the storage defined by the base pointer
    /// and the layout become owned by the resulting `RecycleBox`.
    ///
    /// # Safety
    ///
    /// The caller is responsible for making sure that the object pointed to
    /// exists, is of the proper type and is within an already allocated memory
    /// block consistent with the specified base pointer and layout. Also, the
    /// caller must ensure that ownership of the object and of the allocated
    /// memory is exclusively held by the box.
    pub unsafe fn from_raw_parts(ptr: *mut T, base_ptr: *mut u8, layout: Layout) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr),
            base_ptr: NonNull::new_unchecked(base_ptr),
            layout,
        }
    }

    /// Consumes the box, returning its internal parts.
    ///
    /// The caller becomes responsible for dropping the object pointed to and
    /// deallocating the backing storage.
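    ///
    /// # Examples
    ///
    /// A round trip through the raw parts, handing ownership back to a new
    /// `RecycleBox`:
    ///
    /// ```
    /// use recycle_box::RecycleBox;
    ///
    /// let b = RecycleBox::new(42u32);
    /// let (ptr, base_ptr, layout) = RecycleBox::into_raw_parts(b);
    ///
    /// // Safety: the parts come unmodified from `into_raw_parts`.
    /// let b = unsafe { RecycleBox::from_raw_parts(ptr, base_ptr, layout) };
    /// assert_eq!(*b, 42);
    /// ```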
    pub fn into_raw_parts(boxed: Self) -> (*mut T, *mut u8, Layout) {
        let boxed = ManuallyDrop::new(boxed);
        (boxed.ptr.as_ptr(), boxed.base_ptr.as_ptr(), boxed.layout)
    }
}

/// Checks whether type `T` can be stored within the specified layout at the
/// specified address.
///
/// If type `T` does fit, a pointer with an adequately aligned address is
/// returned.
fn compute_ptr<T>(base_ptr: *mut u8, layout: Layout) -> Option<*mut T> {
    // Calculate the offset dictated by the new alignment.
    let value_layout = Layout::new::<T>();
    if layout.size() < value_layout.size() {
        return None;
    }

    // Note that `align_offset` returns `usize::MAX` when unsuccessful, so the
    // check below appropriately returns `None` in that case too.
    let offset = base_ptr.align_offset(value_layout.align());
    if offset <= layout.size() - value_layout.size() {
        Some(unsafe { base_ptr.add(offset) } as *mut T)
    } else {
        None
    }
}

unsafe impl<T: Send + ?Sized> Send for RecycleBox<T> {}
unsafe impl<T: Sync + ?Sized> Sync for RecycleBox<T> {}

impl<T> Drop for RecycleBox<T>
where
    T: ?Sized,
{
    fn drop(&mut self) {
        unsafe {
            ptr::drop_in_place(self.ptr.as_ptr());
            if self.layout.size() != 0 {
                alloc::dealloc(self.base_ptr.as_ptr(), self.layout);
            }
        }
    }
}

impl<T> AsRef<T> for RecycleBox<T>
where
    T: ?Sized,
{
    fn as_ref(&self) -> &T {
        // Safety is warranted by unique ownership and covariance with `T`.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> AsMut<T> for RecycleBox<T>
where
    T: ?Sized,
{
    fn as_mut(&mut self) -> &mut T {
        // Safety is warranted by unique ownership and covariance with `T`.
        unsafe { self.ptr.as_mut() }
    }
}

impl<T> Deref for RecycleBox<T>
where
    T: ?Sized,
{
    type Target = T;

    fn deref(&self) -> &T {
        self.as_ref()
    }
}

impl<T> DerefMut for RecycleBox<T>
where
    T: ?Sized,
{
    fn deref_mut(&mut self) -> &mut T {
        self.as_mut()
    }
}

impl<T> fmt::Display for RecycleBox<T>
where
    T: fmt::Display + ?Sized,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T> fmt::Debug for RecycleBox<T>
where
    T: fmt::Debug + ?Sized,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T> fmt::Pointer for RecycleBox<T>
where
    T: ?Sized,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let ptr: *const T = &**self;
        fmt::Pointer::fmt(&ptr, f)
    }
}

impl<T> From<RecycleBox<T>> for Pin<RecycleBox<T>>
where
    T: ?Sized,
{
    fn from(boxed: RecycleBox<T>) -> Self {
        // The `Pin` contract is upheld provided that the value pointed to is
        // dropped in place before a new object is allocated. Therefore, the
        // safety of this function depends primarily on the correct
        // implementations of `recycle` and `vacate`.
        unsafe { Pin::new_unchecked(boxed) }
    }
}

impl<F> Future for RecycleBox<F>
where
    F: ?Sized + Future + Unpin,
{
    type Output = F::Output;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        F::poll(Pin::new(&mut *self), cx)
    }
}

/// Macro coercing a `RecycleBox<T>` into a `RecycleBox<U>`, provided that `T`
/// can be coerced to `U`.
///
/// This can be used to obtain a pointer to an unsized type, such as a trait
/// object, from a `Sized` type.
///
/// # Examples
///
/// Make a boxed object into a boxed trait object:
///
/// ```
/// use recycle_box::{RecycleBox, coerce_box};
/// use std::fmt::Display;
///
/// let x = RecycleBox::new(1234u32);
/// let x_display: RecycleBox<dyn Display> = coerce_box!(x);
///
/// println!("{}", x_display.as_ref())
/// ```
///
/// Reuse the box of a trait object for another object sharing the same trait:
///
/// ```
/// use std::future::{self, Future};
/// use recycle_box::{RecycleBox, coerce_box};
///
/// let mut my_box: RecycleBox<dyn Future<Output = i32>> = coerce_box!(RecycleBox::new(future::ready(42)));
/// my_box = coerce_box!(RecycleBox::new(future::pending()));
/// ```
#[macro_export]
macro_rules! coerce_box {
    ($src:expr) => {{
        let (ptr, base_ptr, layout) = $crate::RecycleBox::into_raw_parts($src);
        unsafe { $crate::RecycleBox::from_raw_parts(ptr, base_ptr, layout) }
    }};
}

#[cfg(test)]
mod tests {
    use crate::RecycleBox;
    use std::cell::Cell;
    use std::fmt::Debug;
    use std::mem;
    use std::rc::Rc;

    trait TestTrait: Debug {
        fn name(&self) -> &'static str;
    }

    #[derive(Debug)]
    struct TestLoad<T> {
        load: T,
        name: &'static str,
        counter: Rc<Cell<usize>>,
    }
    impl<T> TestLoad<T> {
        fn new(load: T, name: &'static str, counter: Rc<Cell<usize>>) -> Self {
            counter.set(counter.get() + 1);
            Self {
                load,
                name,
                counter,
            }
        }
        fn load(&self) -> T
        where
            T: Clone,
        {
            self.load.clone()
        }
    }
    impl<T: Debug> TestTrait for TestLoad<T> {
        fn name(&self) -> &'static str {
            self.name
        }
    }
    impl<T: PartialEq> PartialEq for TestLoad<T> {
        fn eq(&self, other: &Self) -> bool {
            self.load == other.load
        }
    }
    impl<T> Drop for TestLoad<T> {
        fn drop(&mut self) {
            self.counter.set(self.counter.get() - 1);
        }
    }

    #[derive(Debug, PartialEq)]
    struct EmptyTestLoad;
    impl TestTrait for EmptyTestLoad {
        fn name(&self) -> &'static str {
            "Empty load"
        }
    }

    fn has_same_location<T, U>(first: *const T, second: *const U) -> bool {
        fn distance(a: usize, b: usize) -> usize {
            if a > b {
                a - b
            } else {
                b - a
            }
        }

        distance(first as usize, second as usize)
            <= distance(mem::align_of::<T>(), mem::align_of::<U>())
    }

    #[test]
    fn test_new() {
        let counter = Rc::new(Cell::new(0));
        let v = TestLoad::new(5, "A", counter.clone());
        let b = RecycleBox::new(v);
        assert_eq!(b.as_ref().load(), 5);
        assert_eq!(counter.get(), 1);
        drop(b);
        assert_eq!(counter.get(), 0);
    }
    #[test]
    fn test_new_zero_sized() {
        let v = EmptyTestLoad;
        let _b = RecycleBox::new(v);
    }
    #[test]
    fn test_recycle_with_smaller() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5.0, "A", counter.clone());
        let v2 = TestLoad::new(3, "B", counter.clone());
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 2);
        let b1_ptr = &*b1 as *const TestLoad<f64>;
        let b2 = RecycleBox::recycle(b1, v2);
        assert_eq!(counter.get(), 1);
        assert_eq!(b2.as_ref().load(), 3);
        assert!(has_same_location(b1_ptr, &*b2));
        drop(b2);
        assert_eq!(counter.get(), 0);
    }
    #[test]
    fn test_recycle_with_bigger() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5, "A", counter.clone());
        let v2 = TestLoad::new([1; 10], "B", counter.clone());
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 2);
        let b2 = RecycleBox::recycle(b1, v2);
        assert_eq!(counter.get(), 1);
        assert_eq!(b2.as_ref().load(), [1; 10]);
        drop(b2);
        assert_eq!(counter.get(), 0);
    }
    #[test]
    fn test_recycle_with_zero_sized() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5, "A", counter.clone());
        let v2 = EmptyTestLoad;
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 1);
        let b1_ptr = &*b1 as *const TestLoad<i32>;
        let b2 = RecycleBox::recycle(b1, v2);
        assert!(has_same_location(b1_ptr, &*b2));
        assert_eq!(counter.get(), 0);
        assert_eq!(*b2.as_ref(), EmptyTestLoad);
    }
    #[test]
    fn test_recycle_from_zero_sized() {
        let counter = Rc::new(Cell::new(0));
        let v1 = EmptyTestLoad;
        let v2 = TestLoad::new(5, "B", counter.clone());
        let b1 = RecycleBox::new(v1);
        let b2 = RecycleBox::recycle(b1, v2);
        assert_eq!(counter.get(), 1);
        assert_eq!(b2.as_ref().load(), 5);
        drop(b2);
        assert_eq!(counter.get(), 0);
    }
    #[test]
    fn test_vacate() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5, "A", counter.clone());
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 1);
        let b1_ptr = &*b1 as *const TestLoad<i32>;
        let b2 = RecycleBox::vacate(b1);
        assert_eq!(counter.get(), 0);
        let v2 = TestLoad::new(5, "B", counter.clone());
        let b3 = RecycleBox::recycle(b2, v2);
        assert!(has_same_location(b1_ptr, &*b3));
        assert_eq!(counter.get(), 1);
        assert_eq!(b3.as_ref().load(), 5);
    }
    #[test]
    fn test_replace_with_smaller() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5.0, "A", counter.clone());
        let v2 = TestLoad::new(3, "B", counter.clone());
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 2);
        let b1_ptr = &*b1 as *const TestLoad<f64>;
        let (v1bis, b2) = RecycleBox::replace(b1, v2);
        assert!(has_same_location(b1_ptr, &*b2));
        assert_eq!(v1bis.load(), 5.0);
        assert_eq!(counter.get(), 2);
        assert_eq!(b2.as_ref().load(), 3);
        drop(b2);
        assert_eq!(counter.get(), 1);
    }
    #[test]
    fn test_replace_with_bigger() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5, "A", counter.clone());
        let v2 = TestLoad::new([1; 10], "B", counter.clone());
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 2);
        let (v1bis, b2) = RecycleBox::replace(b1, v2);
        assert_eq!(v1bis.load(), 5);
        assert_eq!(counter.get(), 2);
        assert_eq!(b2.as_ref().load(), [1; 10]);
        drop(b2);
        assert_eq!(counter.get(), 1);
    }
    #[test]
    fn test_replace_with_zero_sized() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5, "A", counter.clone());
        let v2 = EmptyTestLoad;
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 1);
        let b1_ptr = &*b1 as *const TestLoad<i32>;
        let (v1bis, b2) = RecycleBox::replace(b1, v2);
        assert!(has_same_location(b1_ptr, &*b2));
        assert_eq!(v1bis.load(), 5);
        assert_eq!(counter.get(), 1);
        assert_eq!(*b2.as_ref(), EmptyTestLoad);
    }
    #[test]
    fn test_replace_from_zero_sized() {
        let counter = Rc::new(Cell::new(0));
        let v1 = EmptyTestLoad;
        let v2 = TestLoad::new(5, "B", counter.clone());
        let b1 = RecycleBox::new(v1);
        let (v1bis, b2) = RecycleBox::replace(b1, v2);
        assert_eq!(v1bis, EmptyTestLoad);
        assert_eq!(counter.get(), 1);
        assert_eq!(b2.as_ref().load(), 5);
        drop(b2);
        assert_eq!(counter.get(), 0);
    }
    #[test]
    fn test_take() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(5, "A", counter.clone());
        let b1 = RecycleBox::new(v1);
        assert_eq!(counter.get(), 1);
        let b1_ptr = &*b1 as *const TestLoad<i32>;
        let (v1bis, b2) = RecycleBox::take(b1);
        assert_eq!(v1bis.load(), 5);
        assert_eq!(counter.get(), 1);
        let v2 = TestLoad::new(5, "B", counter.clone());
        let (bempty, b3) = RecycleBox::replace(b2, v2);
        assert!(has_same_location(b1_ptr, &*b3));
        assert_eq!(bempty, ());
        assert_eq!(counter.get(), 2);
        assert_eq!(b3.as_ref().load(), 5);
    }
    #[test]
    fn test_coerce_unsized() {
        let counter = Rc::new(Cell::new(0));
        let v1 = TestLoad::new(0, "A", counter.clone());
        let b1 = RecycleBox::new(v1);
        let mut b_unsized: RecycleBox<dyn TestTrait> = coerce_box!(b1);
        assert_eq!(counter.get(), 1);
        assert_eq!(b_unsized.as_ref().name(), "A");

        let v2 = TestLoad::new([0; 10], "B", counter.clone());
        assert_eq!(counter.get(), 2);
        b_unsized = coerce_box!(RecycleBox::recycle(b_unsized, v2));
        assert_eq!(counter.get(), 1);
        assert_eq!(b_unsized.as_ref().name(), "B");

        let v3 = EmptyTestLoad;
        b_unsized = coerce_box!(RecycleBox::recycle(b_unsized, v3));
        assert_eq!(counter.get(), 0);
        assert_eq!(b_unsized.as_ref().name(), "Empty load");
    }
}