smallbox/smallbox.rs

use core::any::Any;
use core::cell::UnsafeCell;
use core::cmp::Ordering;
use core::fmt;
use core::future::Future;
use core::hash::Hash;
use core::hash::{self};
use core::hint::unreachable_unchecked;
use core::marker::PhantomData;
#[cfg(feature = "coerce")]
use core::marker::Unsize;
use core::mem::ManuallyDrop;
use core::mem::MaybeUninit;
use core::mem::{self};
use core::ops;
#[cfg(feature = "coerce")]
use core::ops::CoerceUnsized;
use core::pin::Pin;
use core::ptr;
use core::ptr::NonNull;

use ::alloc::alloc;
use ::alloc::alloc::Layout;
use ::alloc::alloc::handle_alloc_error;
use ::alloc::boxed::Box;

use crate::sptr;

/// A sentinel pointer that signals that the value is stored inline, on the stack.
///
/// It must never be dereferenced.
const INLINE_SENTINEL: *mut u8 = sptr::without_provenance_mut(0x1);

/// Minimum alignment for heap allocations.
///
/// Forcing a minimum alignment prevents the allocator from returning
/// a pointer with the same address as `INLINE_SENTINEL`, since the
/// address `0x1` is not aligned to 2.
const MIN_ALIGNMENT: usize = 2;
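
// Compile-time sanity checks (a sketch added here for clarity; not part of the
// original source): the sentinel trick only works while `MIN_ALIGNMENT` is a
// power of two greater than 1, so that `Layout::align_to(MIN_ALIGNMENT)` always
// succeeds and no allocation can ever sit at address 0x1.
const _: () = assert!(MIN_ALIGNMENT.is_power_of_two());
const _: () = assert!(MIN_ALIGNMENT > 1);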

#[cfg(feature = "coerce")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, Space> CoerceUnsized<SmallBox<U, Space>>
    for SmallBox<T, Space>
{
}

/// Box a value on the stack or on the heap depending on its size.
///
/// This macro is similar to [`SmallBox::new`], which is used to create a new [`SmallBox`]
/// instance, but relaxes the constraint `T: Sized`.
/// To do that, this macro checks the coercion rules from type `T` to
/// the target type, and triggers a compile-time error on any invalid coercion.
///
/// You can think of it as having the signature `smallbox!<U: Sized, T: ?Sized>(val: U) ->
/// SmallBox<T, Space>`.
///
/// # Example
///
/// ```
/// #[macro_use]
/// extern crate smallbox;
///
/// # fn main() {
/// use smallbox::SmallBox;
/// use smallbox::space::*;
///
/// let small: SmallBox<[usize], S4> = smallbox!([0usize; 2]);
/// let large: SmallBox<[usize], S4> = smallbox!([1usize; 8]);
///
/// assert_eq!(small.len(), 2);
/// assert_eq!(large[7], 1);
///
/// assert!(large.is_heap());
/// # }
/// ```
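///
/// Coercions that are not valid unsizing coercions are rejected at compile
/// time. A sketch of such a failure (this snippet is not from the original
/// docs):
///
/// ```compile_fail
/// #[macro_use]
/// extern crate smallbox;
///
/// # fn main() {
/// use smallbox::SmallBox;
/// use smallbox::space::S4;
///
/// // `u32` cannot be unsized to `[usize]`, so this does not compile.
/// let bad: SmallBox<[usize], S4> = smallbox!(0u32);
/// # }
/// ```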
#[macro_export]
macro_rules! smallbox {
    ( $e: expr ) => {{
        let val = $e;
        let ptr = ::core::ptr::addr_of!(val);
        #[allow(unsafe_code)]
        unsafe {
            $crate::SmallBox::new_unchecked(val, ptr)
        }
    }};
}

/// An optimized box that stores its value on the stack or on the heap depending on its size
pub struct SmallBox<T: ?Sized, Space> {
    space: MaybeUninit<UnsafeCell<Space>>,
    ptr: NonNull<T>,
    _phantom: PhantomData<T>,
}

impl<T: Default, Space> Default for SmallBox<T, Space> {
    fn default() -> Self {
        Self::new(T::default())
    }
}
impl<T: ?Sized, Space> SmallBox<T, Space> {
    /// Box a value on the stack or on the heap depending on its size.
    ///
    /// # Example
    ///
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::*;
    ///
    /// let small: SmallBox<_, S4> = SmallBox::new([0usize; 2]);
    /// let large: SmallBox<_, S4> = SmallBox::new([1usize; 8]);
    ///
    /// assert_eq!(small.len(), 2);
    /// assert_eq!(large[7], 1);
    ///
    /// assert!(large.is_heap());
    /// ```
    #[inline(always)]
    pub fn new(val: T) -> SmallBox<T, Space>
    where T: Sized {
        smallbox!(val)
    }

    #[doc(hidden)]
    #[inline]
    pub unsafe fn new_unchecked<U>(val: U, ptr: *const T) -> SmallBox<T, Space>
    where U: Sized {
        let val = ManuallyDrop::new(val);
        Self::new_copy(&val, ptr)
    }

    /// Change the capacity of a [`SmallBox`].
    ///
    /// This method may move stack-allocated data from the stack to the heap
    /// when the inline space is not sufficient. Once the data
    /// has been moved to the heap, it is never moved back to the stack.
    ///
    /// # Example
    ///
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::S2;
    /// use smallbox::space::S4;
    ///
    /// let s: SmallBox<_, S4> = SmallBox::new([0usize; 4]);
    /// let m: SmallBox<_, S2> = s.resize();
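    /// // A hypothetical continuation (not in the original docs): four words
    /// // do not fit in `S2`, so the resized box now lives on the heap.
    /// assert!(m.is_heap());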
    /// ```
    pub fn resize<ToSpace>(self) -> SmallBox<T, ToSpace> {
        let this = ManuallyDrop::new(self);

        if this.is_heap() {
            // don't change anything if data is already on heap
            let space = MaybeUninit::<UnsafeCell<ToSpace>>::uninit();
            SmallBox {
                space,
                ptr: this.ptr,
                _phantom: PhantomData,
            }
        } else {
            let val: &T = &this;
            unsafe { SmallBox::<T, ToSpace>::new_copy(val, sptr::from_ref(val)) }
        }
    }

    /// Returns `true` if the data is allocated on the heap.
    ///
    /// # Example
    ///
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::S1;
    ///
    /// let stacked: SmallBox<usize, S1> = SmallBox::new(0usize);
    /// assert!(!stacked.is_heap());
    ///
    /// let heaped: SmallBox<(usize, usize), S1> = SmallBox::new((0usize, 1usize));
    /// assert!(heaped.is_heap());
    /// ```
    #[inline]
    pub fn is_heap(&self) -> bool {
        self.ptr.as_ptr().cast::<u8>() != INLINE_SENTINEL
    }

    /// Creates a new `SmallBox` by copying the bytes of `val`, choosing the stack,
    /// the heap, or a dangling well-aligned pointer (for oversized ZSTs) based on
    /// the value's layout.
    ///
    /// Safety: `metadata_ptr` must carry pointer metadata valid for `T`, the bytes
    /// of `val` must be a valid `T`, and the caller gives up ownership of `val`.
    unsafe fn new_copy<U>(val: &U, metadata_ptr: *const T) -> SmallBox<T, Space>
    where U: ?Sized {
        let layout = Layout::for_value::<U>(val);
        let space_layout = Layout::new::<Space>();

        let mut space = MaybeUninit::<UnsafeCell<Space>>::uninit();

        let (ptr_this, val_dst): (*mut u8, *mut u8) =
            if layout.size() <= space_layout.size() && layout.align() <= space_layout.align() {
                // Stack.
                (INLINE_SENTINEL, space.as_mut_ptr().cast())
            } else if layout.size() == 0 {
                // A ZST with alignment greater than `Space`'s: it behaves as if stored
                // on the heap but does not actually allocate, using a dangling pointer
                // with the required alignment.
                (
                    sptr::without_provenance_mut(layout.align()),
                    sptr::without_provenance_mut(layout.align()),
                )
            } else {
                // Heap.
                let layout = layout
                    // Safety: MIN_ALIGNMENT is 2, which is a valid power-of-two alignment.
                    .align_to(MIN_ALIGNMENT)
                    .unwrap_or_else(|_| unreachable_unchecked());
                let heap_ptr = alloc::alloc(layout);

                if heap_ptr.is_null() {
                    handle_alloc_error(layout)
                }

                (heap_ptr, heap_ptr)
            };

        // `self.ptr` always holds the metadata, even if stack allocated.
        let ptr = sptr::with_metadata_of_mut(ptr_this, metadata_ptr);
        // Safety: `ptr` is either INLINE_SENTINEL, a dangling aligned pointer, or a
        // pointer returned from the allocator that has been checked for null.
        let ptr = NonNull::new_unchecked(ptr);

        ptr::copy_nonoverlapping(sptr::from_ref(val).cast(), val_dst, layout.size());

        SmallBox {
            space,
            ptr,
            _phantom: PhantomData,
        }
    }

    unsafe fn downcast_unchecked<U: Any>(self) -> SmallBox<U, Space> {
        let this = ManuallyDrop::new(self);

        let size = mem::size_of::<U>();
        let mut space = MaybeUninit::<UnsafeCell<Space>>::uninit();

        // If the value lives inline, copy its bytes into the new inline space;
        // heap data stays where it is and only the retyped pointer is reused.
        if !this.is_heap() {
            ptr::copy_nonoverlapping::<u8>(
                this.space.as_ptr().cast(),
                space.as_mut_ptr().cast(),
                size,
            );
        }

        let ptr = this.ptr.cast();

        SmallBox {
            space,
            ptr,
            _phantom: PhantomData,
        }
    }

    #[inline]
    unsafe fn as_ptr(&self) -> *const T {
        if self.is_heap() {
            self.ptr.as_ptr()
        } else {
            sptr::with_metadata_of(self.space.as_ptr(), self.ptr.as_ptr())
        }
    }

    #[inline]
    unsafe fn as_mut_ptr(&mut self) -> *mut T {
        if self.is_heap() {
            self.ptr.as_ptr()
        } else {
            sptr::with_metadata_of_mut(self.space.as_mut_ptr(), self.ptr.as_ptr())
        }
    }

    /// Consumes the `SmallBox` and returns ownership of the boxed value.
    ///
    /// # Examples
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::S1;
    ///
    /// let stacked: SmallBox<_, S1> = SmallBox::new([21usize]);
    /// let val = stacked.into_inner();
    /// assert_eq!(val[0], 21);
    ///
    /// let boxed: SmallBox<_, S1> = SmallBox::new(vec![21, 56, 420]);
    /// let val = boxed.into_inner();
    /// assert_eq!(val[1], 56);
    /// ```
    #[inline]
    pub fn into_inner(self) -> T
    where T: Sized {
        let this = ManuallyDrop::new(self);
        let ret_val: T = unsafe { this.as_ptr().read() };

        // Deallocate the heap memory, if any, without dropping the boxed value,
        // which has just been moved out.
        if this.is_heap() && mem::size_of::<T>() != 0 {
            // Safety: MIN_ALIGNMENT is 2; aligning to 2 cannot create an invalid layout.
            let layout = unsafe {
                Layout::new::<T>()
                    .align_to(MIN_ALIGNMENT)
                    .unwrap_or_else(|_| unreachable_unchecked())
            };
            unsafe {
                alloc::dealloc(this.ptr.as_ptr().cast::<u8>(), layout);
            }
        }

        ret_val
    }

    /// Creates a [`SmallBox`] from a standard [`Box`].
    ///
    /// The data will always be stored on the heap, since it is already allocated
    /// there. This method transfers ownership of the allocation from the [`Box`]
    /// to the [`SmallBox`] without copying or moving the data.
    ///
    /// # Example
    ///
    /// ```
    /// # extern crate alloc;
    /// # use alloc::boxed::Box;
    ///
    /// use smallbox::SmallBox;
    /// use smallbox::space::S4;
    ///
    /// let boxed = Box::new([1, 2, 3, 4]);
    /// let small_box: SmallBox<_, S4> = SmallBox::from_box(boxed);
    ///
    /// assert!(small_box.is_heap());
    /// assert_eq!(*small_box, [1, 2, 3, 4]);
    /// ```
    pub fn from_box(boxed: ::alloc::boxed::Box<T>) -> Self {
        unsafe {
            // Safety: `Box::into_raw` never returns null.
            let ptr = NonNull::new_unchecked(Box::into_raw(boxed));
            let space = MaybeUninit::<UnsafeCell<Space>>::uninit();
            SmallBox {
                space,
                ptr,
                _phantom: PhantomData,
            }
        }
    }

    /// Converts a [`SmallBox`] into a standard [`Box`].
    ///
    /// If the data is stored on the stack, it is moved to the heap.
    /// If the data is already on the heap, ownership is transferred without
    /// copying or moving the data.
    ///
    /// # Example
    ///
    /// ```
    /// # extern crate alloc;
    /// # use alloc::boxed::Box;
    ///
    /// use smallbox::SmallBox;
    /// use smallbox::space::S4;
    ///
    /// let small_box: SmallBox<_, S4> = SmallBox::new([1, 2, 3, 4]);
    /// let boxed: Box<[i32; 4]> = SmallBox::into_box(small_box);
    ///
    /// assert_eq!(*boxed, [1, 2, 3, 4]);
    /// ```
    pub fn into_box(boxed: SmallBox<T, Space>) -> ::alloc::boxed::Box<T> {
        unsafe {
            // Resizing to zero-sized inline space forces the value onto the heap.
            let mut enforce_heap = ManuallyDrop::new(boxed.resize::<()>());
            debug_assert!(enforce_heap.is_heap());
            Box::from_raw(enforce_heap.as_mut_ptr())
        }
    }
}

impl<Space> SmallBox<dyn Any, Space> {
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// #[macro_use]
    /// extern crate smallbox;
    ///
    /// # fn main() {
    /// use core::any::Any;
    ///
    /// use smallbox::SmallBox;
    /// use smallbox::space::*;
    ///
    /// fn print_if_string(value: SmallBox<dyn Any, S1>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(smallbox!(my_string));
    /// print_if_string(smallbox!(0i8));
    /// # }
    /// ```
    #[inline]
    pub fn downcast<T: Any>(self) -> Result<SmallBox<T, Space>, Self> {
        if self.is::<T>() {
            unsafe { Ok(self.downcast_unchecked()) }
        } else {
            Err(self)
        }
    }
}

impl<Space> SmallBox<dyn Any + Send, Space> {
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// #[macro_use]
    /// extern crate smallbox;
    ///
    /// # fn main() {
    /// use core::any::Any;
    ///
    /// use smallbox::SmallBox;
    /// use smallbox::space::*;
    ///
    /// fn print_if_string(value: SmallBox<dyn Any + Send, S1>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(smallbox!(my_string));
    /// print_if_string(smallbox!(0i8));
    /// # }
    /// ```
    #[inline]
    pub fn downcast<T: Any>(self) -> Result<SmallBox<T, Space>, Self> {
        if self.is::<T>() {
            unsafe { Ok(self.downcast_unchecked()) }
        } else {
            Err(self)
        }
    }
}

impl<T: ?Sized, Space> ops::Deref for SmallBox<T, Space> {
    type Target = T;

    fn deref(&self) -> &T {
        unsafe { &*self.as_ptr() }
    }
}

impl<T: ?Sized, Space> ops::DerefMut for SmallBox<T, Space> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.as_mut_ptr() }
    }
}

impl<T: ?Sized, Space> ops::Drop for SmallBox<T, Space> {
    fn drop(&mut self) {
        unsafe {
            // Compute the layout first, while the value is still valid.
            let layout = Layout::for_value::<T>(&*self)
                .align_to(MIN_ALIGNMENT)
                .unwrap_or_else(|_| unreachable_unchecked());

            ptr::drop_in_place::<T>(&mut **self);
            if self.is_heap() && layout.size() != 0 {
                alloc::dealloc(self.ptr.as_ptr().cast::<u8>(), layout);
            }
        }
    }
}

impl<T: Clone, Space> Clone for SmallBox<T, Space>
where T: Sized
{
    fn clone(&self) -> Self {
        let val: &T = self;
        SmallBox::new(val.clone())
    }
}

impl<T: ?Sized + fmt::Display, Space> fmt::Display for SmallBox<T, Space> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T: ?Sized + fmt::Debug, Space> fmt::Debug for SmallBox<T, Space> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T: ?Sized, Space> fmt::Pointer for SmallBox<T, Space> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Go through Deref rather than `self.ptr`, which holds the
        // INLINE_SENTINEL address instead of the data address when the
        // value is stored inline.
        let ptr: *const T = &**self;
        fmt::Pointer::fmt(&ptr, f)
    }
}

impl<T: ?Sized + PartialEq, Space> PartialEq for SmallBox<T, Space> {
    fn eq(&self, other: &SmallBox<T, Space>) -> bool {
        PartialEq::eq(&**self, &**other)
    }
}

impl<T: ?Sized + PartialOrd, Space> PartialOrd for SmallBox<T, Space> {
    fn partial_cmp(&self, other: &SmallBox<T, Space>) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
    fn lt(&self, other: &SmallBox<T, Space>) -> bool {
        PartialOrd::lt(&**self, &**other)
    }
    fn le(&self, other: &SmallBox<T, Space>) -> bool {
        PartialOrd::le(&**self, &**other)
    }
    fn ge(&self, other: &SmallBox<T, Space>) -> bool {
        PartialOrd::ge(&**self, &**other)
    }
    fn gt(&self, other: &SmallBox<T, Space>) -> bool {
        PartialOrd::gt(&**self, &**other)
    }
}

impl<T: ?Sized + Ord, Space> Ord for SmallBox<T, Space> {
    fn cmp(&self, other: &SmallBox<T, Space>) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}

impl<T: ?Sized + Eq, Space> Eq for SmallBox<T, Space> {}

impl<T: ?Sized + Hash, Space> Hash for SmallBox<T, Space> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}

// We can implement Future for SmallBox soundly, even though it's not implemented for std's Box.
// The only reason it's not implemented for std's Box is that Box<T>: Unpin holds unconditionally,
// even when T: !Unpin, which always allows getting a &mut Box<T> out of a Pin<&mut Box<T>>.
// For SmallBox this is not the case, because it may carry the data inline on the stack: if T:
// !Unpin, then SmallBox<T>: !Unpin as well. That means you can't get a &mut SmallBox<T> out of a
// Pin<&mut SmallBox<T>> in safe code, so implementing Future for SmallBox directly is safe.
impl<F: Future + ?Sized, S> Future for SmallBox<F, S> {
    type Output = F::Output;

    fn poll(
        self: Pin<&mut Self>,
        cx: &mut core::task::Context<'_>,
    ) -> core::task::Poll<Self::Output> {
        // Safety: when the SmallBox is pinned, the data on the stack is pinned with it.
        // Data on the heap is pinned naturally, so all Pin guarantees are satisfied.
        unsafe { Pin::new_unchecked(&mut **self.get_unchecked_mut()) }.poll(cx)
    }
}

unsafe impl<T: ?Sized + Send, Space> Send for SmallBox<T, Space> {}
unsafe impl<T: ?Sized + Sync, Space> Sync for SmallBox<T, Space> {}

#[cfg(test)]
mod tests {
    use core::any::Any;
    use core::mem;
    use core::ptr::addr_of;

    use ::alloc::boxed::Box;
    use ::alloc::vec;
    use ::alloc::vec::Vec;

    use super::SmallBox;
    use crate::space::*;

    #[test]
    fn test_basic() {
        let stacked: SmallBox<usize, S1> = SmallBox::new(1234usize);
        assert!(*stacked == 1234);

        let heaped: SmallBox<(usize, usize), S1> = SmallBox::new((0, 1));
        assert!(*heaped == (0, 1));
    }

    #[test]
    fn test_new_unchecked() {
        let val = [0usize, 1];
        let ptr = addr_of!(val);

        unsafe {
            let stacked: SmallBox<[usize], S2> = SmallBox::new_unchecked(val, ptr);
            assert!(*stacked == [0, 1]);
            assert!(!stacked.is_heap());
        }

        let val = [0usize, 1, 2];
        let ptr = addr_of!(val);

        unsafe {
            let heaped: SmallBox<dyn Any, S2> = SmallBox::new_unchecked(val, ptr);
            assert!(heaped.is_heap());

            if let Some(array) = heaped.downcast_ref::<[usize; 3]>() {
                assert_eq!(*array, [0, 1, 2]);
            } else {
                unreachable!();
            }
        }
    }

    #[test]
    #[deny(unsafe_code)]
    fn test_macro() {
        let stacked: SmallBox<dyn Any, S1> = smallbox!(1234usize);
        if let Some(num) = stacked.downcast_ref::<usize>() {
            assert_eq!(*num, 1234);
        } else {
            unreachable!();
        }

        let heaped: SmallBox<dyn Any, S1> = smallbox!([0usize, 1]);
        if let Some(array) = heaped.downcast_ref::<[usize; 2]>() {
            assert_eq!(*array, [0, 1]);
        } else {
            unreachable!();
        }

        let is_even: SmallBox<dyn Fn(u8) -> bool, S1> = smallbox!(|num: u8| num % 2 == 0);
        assert!(!is_even(5));
        assert!(is_even(6));
    }

    #[test]
    #[cfg(feature = "coerce")]
    fn test_coerce() {
        let stacked: SmallBox<dyn Any, S1> = SmallBox::new(1234usize);
        if let Some(num) = stacked.downcast_ref::<usize>() {
            assert_eq!(*num, 1234);
        } else {
            unreachable!();
        }

        let heaped: SmallBox<dyn Any, S1> = SmallBox::new([0usize, 1]);
        if let Some(array) = heaped.downcast_ref::<[usize; 2]>() {
            assert_eq!(*array, [0, 1]);
        } else {
            unreachable!();
        }
    }

    #[test]
    fn test_drop() {
        use core::cell::Cell;

        #[allow(dead_code)]
        struct Struct<'a>(&'a Cell<bool>, u8);
        impl<'a> Drop for Struct<'a> {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }

        let flag = Cell::new(false);
        let stacked: SmallBox<_, S2> = SmallBox::new(Struct(&flag, 0));
        assert!(!stacked.is_heap());
        assert!(!flag.get());
        drop(stacked);
        assert!(flag.get());

        let flag = Cell::new(false);
        let heaped: SmallBox<_, S1> = SmallBox::new(Struct(&flag, 0));
        assert!(heaped.is_heap());
        assert!(!flag.get());
        drop(heaped);
        assert!(flag.get());
    }

    #[test]
    fn test_dont_drop_space() {
        #[allow(dead_code)]
        struct NoDrop(S1);
        impl Drop for NoDrop {
            fn drop(&mut self) {
                unreachable!();
            }
        }

        drop(SmallBox::<_, NoDrop>::new([true]));
    }

    #[test]
    fn test_oversize() {
        let fit = SmallBox::<_, S1>::new([1usize]);
        let oversize = SmallBox::<_, S1>::new([1usize, 2]);
        assert!(!fit.is_heap());
        assert!(oversize.is_heap());
    }
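
    // A sketch added alongside the original tests (not from the original source):
    // an alignment-1 value that spills to the heap takes the `align_to(MIN_ALIGNMENT)`
    // path, so its allocation can never collide with the 0x1 `INLINE_SENTINEL` address.
    #[test]
    fn test_align_one_on_heap() {
        let heaped: SmallBox<[u8; 32], S1> = SmallBox::new([7u8; 32]);
        assert!(heaped.is_heap());
        assert_eq!(heaped[31], 7);
    }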

    #[test]
    fn test_resize() {
        let m = SmallBox::<_, S4>::new([1usize, 2]);
        let l = m.resize::<S8>();
        assert!(!l.is_heap());
        let m = l.resize::<S4>();
        assert!(!m.is_heap());
        let s = m.resize::<S2>();
        assert!(!s.is_heap());
        let xs = s.resize::<S1>();
        assert!(xs.is_heap());
        let m = xs.resize::<S4>();
        assert!(m.is_heap());
        assert_eq!(*m, [1usize, 2]);
    }

    #[test]
    fn test_clone() {
        let stacked: SmallBox<[usize; 2], S2> = smallbox!([1usize, 2]);
        assert_eq!(stacked, stacked.clone());
    }

    #[test]
    fn test_zst() {
        struct ZSpace;

        let zst: SmallBox<[usize], S1> = smallbox!([1usize; 0]);
        assert_eq!(*zst, [1usize; 0]);

        let zst: SmallBox<[usize], ZSpace> = smallbox!([1usize; 0]);
        assert_eq!(*zst, [1usize; 0]);
        let zst: SmallBox<[usize], ZSpace> = smallbox!([1usize; 2]);
        assert_eq!(*zst, [1usize; 2]);
    }

    #[test]
    fn test_downcast() {
        let stacked: SmallBox<dyn Any, S1> = smallbox!(0x01u32);
        assert!(!stacked.is_heap());
        assert_eq!(SmallBox::new(0x01), stacked.downcast::<u32>().unwrap());

        let heaped: SmallBox<dyn Any, S1> = smallbox!([1usize, 2]);
        assert!(heaped.is_heap());
        assert_eq!(
            smallbox!([1usize, 2]),
            heaped.downcast::<[usize; 2]>().unwrap()
        );

        let stacked_send: SmallBox<dyn Any + Send, S1> = smallbox!(0x01u32);
        assert!(!stacked_send.is_heap());
        assert_eq!(SmallBox::new(0x01), stacked_send.downcast::<u32>().unwrap());

        let heaped_send: SmallBox<dyn Any + Send, S1> = smallbox!([1usize, 2]);
        assert!(heaped_send.is_heap());
        assert_eq!(
            SmallBox::new([1usize, 2]),
            heaped_send.downcast::<[usize; 2]>().unwrap()
        );

        let mismatched: SmallBox<dyn Any, S1> = smallbox!(0x01u32);
        assert!(mismatched.downcast::<u8>().is_err());
        let mismatched: SmallBox<dyn Any, S1> = smallbox!(0x01u32);
        assert!(mismatched.downcast::<u64>().is_err());
    }

    #[test]
    fn test_option_encoding() {
        let tester: SmallBox<Box<()>, S2> = SmallBox::new(Box::new(()));
        assert!(Some(tester).is_some());
    }

    #[test]
    fn test_into_inner() {
        let tester: SmallBox<_, S1> = SmallBox::new([21usize]);
        let val = tester.into_inner();
        assert_eq!(val[0], 21);

        let tester: SmallBox<_, S1> = SmallBox::new(vec![21, 56, 420]);
        let val = tester.into_inner();
        assert_eq!(val[1], 56);
    }

    #[test]
    fn test_interior_mutability() {
        use core::cell::Cell;
        let cellbox = SmallBox::<Cell<u32>, S1>::new(Cell::new(0));
        assert!(!cellbox.is_heap());
        cellbox.set(1);
        assert_eq!(cellbox.get(), 1);
    }

    #[test]
    fn test_future() {
        let boxed_fut: SmallBox<_, S1> = SmallBox::new(async { 123 });

        assert_eq!(futures::executor::block_on(boxed_fut), 123);
    }
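
    // A sketch added alongside the original tests (not from the original source):
    // `async` blocks are `!Unpin`, so this exercises the property argued for in
    // the comment above the `Future` impl, namely that a `SmallBox` of a `!Unpin`
    // future is still pollable through `Pin`.
    #[test]
    fn test_future_not_unpin() {
        use core::marker::PhantomPinned;

        let fut = async {
            // Holding a `PhantomPinned` makes the generated future visibly `!Unpin`.
            let _pinned = PhantomPinned;
            42
        };
        let boxed_fut: SmallBox<_, S4> = SmallBox::new(fut);
        assert_eq!(futures::executor::block_on(boxed_fut), 42);
    }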

    #[test]
    fn test_variance() {
        #[allow(dead_code)]
        fn test<'short, 'long: 'short>(val: SmallBox<&'long str, S1>) -> SmallBox<&'short str, S1> {
            val
        }
    }

    #[test]
    fn test_overaligned_zst() {
        #[repr(align(512))]
        #[derive(Debug, PartialEq, Eq)]
        struct OveralignedZst;

        let zst: SmallBox<OveralignedZst, S1> = smallbox!(OveralignedZst);
        #[allow(clippy::as_conversions)]
        let zst_addr = addr_of!(*zst) as usize;
        assert_eq!(*zst, OveralignedZst);
        assert_eq!(zst_addr % 512, 0);
    }

    #[test]
    fn test_overaligned_zst_dyn() {
        #[repr(align(512))]
        #[derive(Debug, PartialEq, Eq)]
        struct OveralignedZst;

        trait Foo {}

        impl Foo for OveralignedZst {}

        let zst: SmallBox<dyn Foo, S1> = smallbox!(OveralignedZst);
        #[allow(clippy::as_conversions)]
        let zst_addr = addr_of!(*zst) as *const () as usize;
        assert_eq!(zst_addr % 512, 0);
    }

    #[test]
    fn test_null_ptr_optimization() {
        assert_eq!(
            mem::size_of::<SmallBox<i32, S1>>(),
            mem::size_of::<Option<SmallBox<i32, S1>>>()
        );
    }

    #[test]
    fn test_box_roundtrip() {
        // Box -> SmallBox -> Box
        let original_data = vec![1, 2, 3, 4, 5];
        let original_box: Box<dyn Any> = Box::new(original_data.clone());

        let intermediate_small_box: SmallBox<dyn Any, S4> = SmallBox::from_box(original_box);
        assert!(intermediate_small_box.is_heap());

        let final_box: Box<dyn Any> = SmallBox::into_box(intermediate_small_box);
        let final_data: &Vec<i32> = final_box.downcast_ref().unwrap();
        assert_eq!(original_data, *final_data);

        // SmallBox -> Box -> SmallBox
        let original_small_box: SmallBox<dyn Any, S4> = smallbox!(original_data.clone());
        assert!(!original_small_box.is_heap());

        let intermediate_box: Box<dyn Any> = SmallBox::into_box(original_small_box);

        let final_small_box: SmallBox<dyn Any, S4> = SmallBox::from_box(intermediate_box);
        assert!(final_small_box.is_heap());
        let final_data: &Vec<i32> = final_small_box.downcast_ref().unwrap();
        assert_eq!(original_data, *final_data);
    }
}