smallvec/lib.rs

// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Small vectors in various sizes. These store a certain number of elements inline, and fall back
//! to the heap for larger allocations.  This can be a useful optimization for improving cache
//! locality and reducing allocator traffic for workloads that fit within the inline buffer.
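//!
//! ## Example
//!
//! A minimal usage sketch:
//!
//! ```
//! use smallvec::{smallvec, SmallVec};
//!
//! // This vector can hold up to four elements inline before spilling to the heap.
//! let mut v: SmallVec<u32, 4> = smallvec![1, 2];
//! v.push(3);
//! assert_eq!(v.as_slice(), &[1, 2, 3]);
//! assert!(!v.spilled());
//! ```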
//!
//! ## `no_std` support
//!
//! By default, `smallvec` does not depend on `std`.  However, the optional
//! `std` feature implements the `std::io::Write` trait for vectors of `u8`.
//! When this feature is enabled, `smallvec` depends on `std`.
//!
//! ## Optional features
//!
//! ### `std`
//!
//! When this feature is enabled, traits available from `std` are implemented:
//!
//! * `SmallVec<u8, _>` implements the [`std::io::Write`] trait.
//! * [`CollectionAllocErr`] implements [`std::error::Error`].
//!
//! This feature is not compatible with `#![no_std]` programs.
//!
//! ### `serde`
//!
//! When this optional dependency is enabled, `SmallVec` implements the `serde::Serialize` and
//! `serde::Deserialize` traits.
//!
//! ### `extract_if`
//!
//! **This feature is unstable.** It may change to match the unstable `extract_if` method in libstd.
//!
//! Enables the `extract_if` method, which produces an iterator that calls a user-provided
//! closure to determine which elements of the vector to remove and yield from the iterator.
//!
//! ### `specialization`
//!
//! **This feature is unstable and requires a nightly build of the Rust toolchain.**
//!
//! When this feature is enabled, `SmallVec::from(slice)` has improved performance for slices
//! of `Copy` types.  (Without this feature, you can use `SmallVec::from_slice` to get optimal
//! performance for `Copy` types.)
//!
//! Tracking issue: [rust-lang/rust#31844](https://github.com/rust-lang/rust/issues/31844)
//!
//! ### `may_dangle`
//!
//! **This feature is unstable and requires a nightly build of the Rust toolchain.**
//!
//! This feature makes the Rust compiler less strict about use of vectors that contain borrowed
//! references. For details, see the
//! [Rustonomicon](https://doc.rust-lang.org/1.42.0/nomicon/dropck.html#an-escape-hatch).
//!
//! Tracking issue: [rust-lang/rust#34761](https://github.com/rust-lang/rust/issues/34761)

#![no_std]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(feature = "specialization", allow(incomplete_features))]
#![cfg_attr(feature = "specialization", feature(specialization))]
#![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]

#[doc(hidden)]
pub extern crate alloc;

#[cfg(any(test, feature = "std"))]
extern crate std;

#[cfg(test)]
mod tests;

use alloc::boxed::Box;
use alloc::vec;
use alloc::vec::Vec;

use alloc::alloc::Layout;
use core::borrow::Borrow;
use core::borrow::BorrowMut;
use core::fmt::Debug;
use core::hash::{Hash, Hasher};
use core::marker::PhantomData;
use core::mem::align_of;
use core::mem::size_of;
use core::mem::ManuallyDrop;
use core::mem::MaybeUninit;
use core::ptr::addr_of;
use core::ptr::addr_of_mut;
use core::ptr::copy;
use core::ptr::copy_nonoverlapping;
use core::ptr::NonNull;

#[cfg(feature = "bytes")]
use bytes::{buf::UninitSlice, BufMut};
#[cfg(feature = "malloc_size_of")]
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
#[cfg(feature = "serde")]
use serde::{
    de::{Deserialize, Deserializer, SeqAccess, Visitor},
    ser::{Serialize, SerializeSeq, Serializer},
};
#[cfg(feature = "std")]
use std::io;

/// Error type for APIs with fallible heap allocation
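///
/// For example, a fallible reservation reports overflow through this type
/// (a sketch):
///
/// ```
/// use smallvec::{CollectionAllocErr, SmallVec};
///
/// let mut v: SmallVec<u8, 4> = SmallVec::new();
/// // `usize::MAX` additional elements can never fit; the size computation overflows.
/// assert!(matches!(
///     v.try_reserve(usize::MAX),
///     Err(CollectionAllocErr::CapacityOverflow)
/// ));
/// ```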
#[derive(Debug)]
pub enum CollectionAllocErr {
    /// The required capacity overflowed `usize::MAX`, or some other error
    /// occurred during the size computation
    CapacityOverflow,
    /// The allocator returned an error
    AllocErr {
        /// The layout that was passed to the allocator
        layout: Layout,
    },
}
impl core::fmt::Display for CollectionAllocErr {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "Allocation error: {:?}", self)
    }
}

#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl std::error::Error for CollectionAllocErr {}

/// Either a stack array with `length <= N` or a heap array
/// whose pointer and capacity are stored here.
///
/// We store a `NonNull<T>` instead of a `*mut T`, so that
/// niche-optimization can be performed and the type is covariant
/// with respect to `T`.
#[repr(C)]
pub union RawSmallVec<T, const N: usize> {
    inline: ManuallyDrop<MaybeUninit<[T; N]>>,
    heap: (NonNull<T>, usize),
}

#[inline]
fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T {
    match result {
        Ok(x) => x,
        Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
        Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout),
    }
}

impl<T, const N: usize> RawSmallVec<T, N> {
    #[inline]
    const fn is_zst() -> bool {
        size_of::<T>() == 0
    }

    #[inline]
    const fn new() -> Self {
        Self::new_inline(MaybeUninit::uninit())
    }
    #[inline]
    const fn new_inline(inline: MaybeUninit<[T; N]>) -> Self {
        Self {
            inline: ManuallyDrop::new(inline),
        }
    }
    #[inline]
    const fn new_heap(ptr: NonNull<T>, capacity: usize) -> Self {
        Self {
            heap: (ptr, capacity),
        }
    }

    #[inline]
    const fn as_ptr_inline(&self) -> *const T {
        // SAFETY: This is safe because we don't read the value. We only get a pointer to the data.
        // Dereferencing the pointer is unsafe, so unsafe code is required to misuse the return
        // value.
        (unsafe { addr_of!(self.inline) }) as *const T
    }

    #[inline]
    fn as_mut_ptr_inline(&mut self) -> *mut T {
        // SAFETY: See above.
        (unsafe { addr_of_mut!(self.inline) }) as *mut T
    }

    /// # Safety
    ///
    /// The vector must be on the heap
    #[inline]
    const unsafe fn as_ptr_heap(&self) -> *const T {
        self.heap.0.as_ptr()
    }

    /// # Safety
    ///
    /// The vector must be on the heap
    #[inline]
    unsafe fn as_mut_ptr_heap(&mut self) -> *mut T {
        self.heap.0.as_ptr()
    }

    /// # Safety
    ///
    /// `new_capacity` must be nonzero and greater than or equal to the length.
    /// `T` must not be a ZST.
    unsafe fn try_grow_raw(
        &mut self,
        len: TaggedLen,
        new_capacity: usize,
    ) -> Result<(), CollectionAllocErr> {
        use alloc::alloc::{alloc, realloc};
        debug_assert!(!Self::is_zst());
        debug_assert!(new_capacity > 0);
        debug_assert!(new_capacity >= len.value(Self::is_zst()));

        let was_on_heap = len.on_heap(Self::is_zst());
        let ptr = if was_on_heap {
            self.as_mut_ptr_heap()
        } else {
            self.as_mut_ptr_inline()
        };
        let len = len.value(Self::is_zst());

        let new_layout =
            Layout::array::<T>(new_capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?;
        if new_layout.size() > isize::MAX as usize {
            return Err(CollectionAllocErr::CapacityOverflow);
        }

        let new_ptr = if len == 0 || !was_on_heap {
            // get a fresh allocation
            let new_ptr = alloc(new_layout) as *mut T; // `new_layout` has nonzero size.
            let new_ptr =
                NonNull::new(new_ptr).ok_or(CollectionAllocErr::AllocErr { layout: new_layout })?;
            copy_nonoverlapping(ptr, new_ptr.as_ptr(), len);
            new_ptr
        } else {
            // use realloc

            // this can't overflow, since we already constructed an equivalent layout during
            // the previous allocation
            let old_layout =
                Layout::from_size_align_unchecked(self.heap.1 * size_of::<T>(), align_of::<T>());

            // SAFETY: `ptr` was allocated with this allocator.
            // `old_layout` is the same as the layout used to allocate the previous memory block.
            // `new_layout.size()` is greater than zero, and does not overflow when rounded up to
            // alignment, since it was constructed with `Layout::array`.
            let new_ptr = realloc(ptr as *mut u8, old_layout, new_layout.size()) as *mut T;
            NonNull::new(new_ptr).ok_or(CollectionAllocErr::AllocErr { layout: new_layout })?
        };
        *self = Self::new_heap(new_ptr, new_capacity);
        Ok(())
    }
}

/// `Vec` guarantees that its length is always less than [`isize::MAX`] in *bytes*.
///
/// For a non-ZST, this means that the length is less than `isize::MAX` objects, which implies we
/// have at least one free bit we can use. We use the least significant bit for the tag and store
/// the length in the `usize::BITS - 1` most significant bits.
///
/// For a ZST, we never use the heap, so we just store the length directly.
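///
/// For example (non-ZST): a length of 3 stored inline is encoded as
/// `(3 << 1) | 0`, i.e. `6`, while the same length on the heap is encoded as
/// `(3 << 1) | 1`, i.e. `7`.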
#[repr(transparent)]
#[derive(Clone, Copy)]
struct TaggedLen(usize);

impl TaggedLen {
    #[inline]
    pub const fn new(len: usize, on_heap: bool, is_zst: bool) -> Self {
        if is_zst {
            debug_assert!(!on_heap);
            TaggedLen(len)
        } else {
            debug_assert!(len < isize::MAX as usize);
            TaggedLen((len << 1) | on_heap as usize)
        }
    }

    #[inline]
    #[must_use]
    pub const fn on_heap(self, is_zst: bool) -> bool {
        if is_zst {
            false
        } else {
            (self.0 & 1_usize) == 1
        }
    }

    #[inline]
    pub const fn value(self, is_zst: bool) -> usize {
        if is_zst {
            self.0
        } else {
            self.0 >> 1
        }
    }
}

#[repr(C)]
pub struct SmallVec<T, const N: usize> {
    len: TaggedLen,
    raw: RawSmallVec<T, N>,
    _marker: PhantomData<T>,
}

unsafe impl<T: Send, const N: usize> Send for SmallVec<T, N> {}
unsafe impl<T: Sync, const N: usize> Sync for SmallVec<T, N> {}

/// An iterator that removes the items from a `SmallVec` and yields them by value.
///
/// Returned from [`SmallVec::drain`][1].
///
/// [1]: struct.SmallVec.html#method.drain
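///
/// A usage sketch:
///
/// ```
/// use smallvec::SmallVec;
///
/// let mut v: SmallVec<i32, 4> = SmallVec::from_slice(&[1, 2, 3, 4]);
/// let drained: Vec<i32> = v.drain(1..3).collect();
/// assert_eq!(drained, [2, 3]);
/// assert_eq!(v.as_slice(), &[1, 4]);
/// ```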
pub struct Drain<'a, T: 'a, const N: usize> {
    // `vec` points to a valid object within its lifetime.
    // This is ensured by the fact that we're holding an iterator to its items.
    //
    // # Safety
    //
    // Members in vec[tail_start..tail_start + tail_len] are initialized
    // even though vec has length < tail_start
    tail_start: usize,
    tail_len: usize,
    iter: core::slice::Iter<'a, T>,
    vec: core::ptr::NonNull<SmallVec<T, N>>,
}

impl<'a, T: 'a, const N: usize> Iterator for Drain<'a, T, N> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        // SAFETY: we shrank the length of the vector so it no longer owns these items, and we can
        // take ownership of them.
        self.iter
            .next()
            .map(|reference| unsafe { core::ptr::read(reference) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, T: 'a, const N: usize> DoubleEndedIterator for Drain<'a, T, N> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        // SAFETY: see above
        self.iter
            .next_back()
            .map(|reference| unsafe { core::ptr::read(reference) })
    }
}

impl<T, const N: usize> ExactSizeIterator for Drain<'_, T, N> {
    #[inline]
    fn len(&self) -> usize {
        self.iter.len()
    }
}

impl<T, const N: usize> core::iter::FusedIterator for Drain<'_, T, N> {}

impl<'a, T: 'a, const N: usize> Drop for Drain<'a, T, N> {
    fn drop(&mut self) {
        if core::mem::needs_drop::<T>() {
            self.for_each(drop);
        }

        if self.tail_len > 0 {
            // SAFETY: we're copying initialized members back to the end of the vector
            // then updating its length
            unsafe {
                let source_vec = self.vec.as_mut();

                let start = source_vec.len();
                let tail = self.tail_start;
                if tail != start {
                    // as_mut_ptr creates a &mut, invalidating other pointers.
                    // This pattern avoids calling it with a pointer already present.
                    let ptr = source_vec.as_mut_ptr();
                    let src = ptr.add(tail);
                    let dst = ptr.add(start);
                    copy(src, dst, self.tail_len);
                }
                source_vec.set_len(start + self.tail_len);
            }
        }
    }
}

#[cfg(feature = "extract_if")]
/// An iterator which uses a closure to determine if an element should be removed.
///
/// Returned from [`SmallVec::extract_if`][1].
///
/// [1]: struct.SmallVec.html#method.extract_if
pub struct ExtractIf<'a, T, const N: usize, F>
where
    F: FnMut(&mut T) -> bool,
{
    vec: &'a mut SmallVec<T, N>,
    /// The index of the item that will be inspected by the next call to `next`.
    idx: usize,
    /// The number of items that have been drained (removed) thus far.
    del: usize,
    /// The original length of `vec` prior to draining.
    old_len: usize,
    /// The filter test predicate.
    pred: F,
}

#[cfg(feature = "extract_if")]
impl<T, const N: usize, F> core::fmt::Debug for ExtractIf<'_, T, N, F>
where
    F: FnMut(&mut T) -> bool,
    T: core::fmt::Debug,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("ExtractIf")
            .field(&self.vec.as_slice())
            .finish()
    }
}

#[cfg(feature = "extract_if")]
impl<T, F, const N: usize> Iterator for ExtractIf<'_, T, N, F>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        unsafe {
            while self.idx < self.old_len {
                let i = self.idx;
                let v = core::slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
                let drained = (self.pred)(&mut v[i]);
                // Update the index *after* the predicate is called. If the index
                // is updated prior and the predicate panics, the element at this
                // index would be leaked.
                self.idx += 1;
                if drained {
                    self.del += 1;
                    return Some(core::ptr::read(&v[i]));
                } else if self.del > 0 {
                    let del = self.del;
                    let src: *const T = &v[i];
                    let dst: *mut T = &mut v[i - del];
                    core::ptr::copy_nonoverlapping(src, dst, 1);
                }
            }
            None
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.old_len - self.idx))
    }
}

#[cfg(feature = "extract_if")]
impl<T, F, const N: usize> Drop for ExtractIf<'_, T, N, F>
where
    F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        unsafe {
            if self.idx < self.old_len && self.del > 0 {
                // This is a pretty messed up state, and there isn't really an
                // obviously right thing to do. We don't want to keep trying
                // to execute `pred`, so we just backshift all the unprocessed
                // elements and tell the vec that they still exist. The backshift
                // is required to prevent a double-drop of the last successfully
                // drained item prior to a panic in the predicate.
                let ptr = self.vec.as_mut_ptr();
                let src = ptr.add(self.idx);
                let dst = src.sub(self.del);
                let tail_len = self.old_len - self.idx;
                src.copy_to(dst, tail_len);
            }
            self.vec.set_len(self.old_len - self.del);
        }
    }
}

/// An iterator that consumes a `SmallVec` and yields its items by value.
///
/// Returned from [`SmallVec::into_iter`][1].
///
/// [1]: struct.SmallVec.html#method.into_iter
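///
/// A usage sketch:
///
/// ```
/// use smallvec::SmallVec;
///
/// let v: SmallVec<i32, 4> = SmallVec::from_slice(&[1, 2, 3]);
/// let mut iter = v.into_iter();
/// assert_eq!(iter.next(), Some(1));
/// assert_eq!(iter.next_back(), Some(3));
/// assert_eq!(iter.as_slice(), &[2]);
/// ```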
pub struct IntoIter<T, const N: usize> {
    // # Safety
    //
    // `end` decides whether the data lives on the heap or not
    //
    // The members from begin..end are initialized
    raw: RawSmallVec<T, N>,
    begin: usize,
    end: TaggedLen,
    _marker: PhantomData<T>,
}

// SAFETY: IntoIter has unique ownership of its contents.  Sending (or sharing) an `IntoIter<T, N>`
// is equivalent to sending (or sharing) a `SmallVec<T, N>`.
unsafe impl<T, const N: usize> Send for IntoIter<T, N> where T: Send {}
unsafe impl<T, const N: usize> Sync for IntoIter<T, N> where T: Sync {}

impl<T, const N: usize> IntoIter<T, N> {
    #[inline]
    const fn is_zst() -> bool {
        size_of::<T>() == 0
    }

    #[inline]
    const fn as_ptr(&self) -> *const T {
        let on_heap = self.end.on_heap(Self::is_zst());
        if on_heap {
            // SAFETY: vector is on the heap
            unsafe { self.raw.as_ptr_heap() }
        } else {
            self.raw.as_ptr_inline()
        }
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T {
        let on_heap = self.end.on_heap(Self::is_zst());
        if on_heap {
            // SAFETY: vector is on the heap
            unsafe { self.raw.as_mut_ptr_heap() }
        } else {
            self.raw.as_mut_ptr_inline()
        }
    }

    #[inline]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: The members in self.begin..self.end.value() are all initialized
        // So the pointer arithmetic is valid, and so is the construction of the slice
        unsafe {
            let ptr = self.as_ptr();
            core::slice::from_raw_parts(
                ptr.add(self.begin),
                self.end.value(Self::is_zst()) - self.begin,
            )
        }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: see above
        unsafe {
            let ptr = self.as_mut_ptr();
            core::slice::from_raw_parts_mut(
                ptr.add(self.begin),
                self.end.value(Self::is_zst()) - self.begin,
            )
        }
    }
}

impl<T, const N: usize> Iterator for IntoIter<T, N> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.begin == self.end.value(Self::is_zst()) {
            None
        } else {
            // SAFETY: see above
            unsafe {
                let ptr = self.as_mut_ptr();
                let value = ptr.add(self.begin).read();
                self.begin += 1;
                Some(value)
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.end.value(Self::is_zst()) - self.begin;
        (size, Some(size))
    }
}

impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, N> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        let mut end = self.end.value(Self::is_zst());
        if self.begin == end {
            None
        } else {
            // SAFETY: see above
            unsafe {
                let ptr = self.as_mut_ptr();
                let on_heap = self.end.on_heap(Self::is_zst());
                end -= 1;
                self.end = TaggedLen::new(end, on_heap, Self::is_zst());
                let value = ptr.add(end).read();
                Some(value)
            }
        }
    }
}
impl<T, const N: usize> ExactSizeIterator for IntoIter<T, N> {}
impl<T, const N: usize> core::iter::FusedIterator for IntoIter<T, N> {}

impl<T, const N: usize> SmallVec<T, N> {
    #[inline]
    const fn is_zst() -> bool {
        size_of::<T>() == 0
    }

    #[inline]
    pub const fn new() -> SmallVec<T, N> {
        Self {
            len: TaggedLen::new(0, false, Self::is_zst()),
            raw: RawSmallVec::new(),
            _marker: PhantomData,
        }
    }

    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let mut this = Self::new();
        if capacity > Self::inline_size() {
            this.grow(capacity);
        }
        this
    }

    #[inline]
    pub fn from_vec(vec: Vec<T>) -> Self {
        if vec.capacity() == 0 {
            return Self::new();
        }

        if Self::is_zst() {
            // "Move" elements to stack buffer. They're ZST so we don't actually have to do
            // anything. Just make sure they're not dropped.
            // We don't wrap the vector in ManuallyDrop so that when it's dropped, the memory is
            // deallocated, if it needs to be.
            let mut vec = vec;
            let len = vec.len();

            // SAFETY: `0` is less than the vector's capacity.
            // old_len..new_len is an empty range. So there are no uninitialized elements
            unsafe { vec.set_len(0) };
            Self {
                len: TaggedLen::new(len, false, Self::is_zst()),
                raw: RawSmallVec::new(),
                _marker: PhantomData,
            }
        } else {
            let mut vec = ManuallyDrop::new(vec);
            let len = vec.len();
            let cap = vec.capacity();
            // SAFETY: vec.capacity is not `0` (checked above), so the pointer
            // can not dangle and thus specifically cannot be null.
            let ptr = unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) };

            Self {
                len: TaggedLen::new(len, true, Self::is_zst()),
                raw: RawSmallVec::new_heap(ptr, cap),
                _marker: PhantomData,
            }
        }
    }

    #[inline]
    pub const fn from_buf(buf: [T; N]) -> Self {
        // SAFETY: all the members in 0..N are initialized
        Self {
            len: TaggedLen::new(N, false, Self::is_zst()),
            raw: RawSmallVec::new_inline(MaybeUninit::new(buf)),
            _marker: PhantomData,
        }
    }

    #[inline]
    pub fn from_buf_and_len(buf: [T; N], len: usize) -> Self {
        assert!(len <= N);
        // SAFETY: all the members in 0..len are initialized
        let mut vec = Self {
            len: TaggedLen::new(len, false, Self::is_zst()),
            raw: RawSmallVec::new_inline(MaybeUninit::new(buf)),
            _marker: PhantomData,
        };
        // Drop the remaining elements so their resources are not leaked.
        unsafe {
            // SAFETY: the resulting pointer is in range of the stack allocation
            let remainder_ptr = vec.raw.as_mut_ptr_inline().add(len);
            let remainder_len = N - len;

            // SAFETY: the values are initialized, so dropping them here is fine.
            core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(
                remainder_ptr,
                remainder_len,
            ));
        }

        vec
    }

    /// Constructs a new `SmallVec` on the stack from an array without copying
    /// elements. Also sets the length. The caller is responsible for ensuring
    /// that `len <= N`.
    ///
    /// # Examples
    ///
    /// ```
    /// use smallvec::SmallVec;
    /// use std::mem::MaybeUninit;
    ///
    /// let buf = [1, 2, 3, 4, 5, 0, 0, 0];
    /// let small_vec = unsafe {
    ///     SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), 5)
    /// };
    ///
    /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
    /// ```
    ///
    /// # Safety
    ///
    /// `len <= N`, and all the elements in `buf[..len]` must be initialized
    #[inline]
    pub const unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit<[T; N]>, len: usize) -> Self {
        debug_assert!(len <= N);
        Self {
            len: TaggedLen::new(len, false, Self::is_zst()),
            raw: RawSmallVec::new_inline(buf),
            _marker: PhantomData,
        }
    }

    /// Sets the tag to be on the heap
    ///
    /// # Safety
    ///
    /// The active union member must be `self.raw.heap`
    #[inline]
    unsafe fn set_on_heap(&mut self) {
        self.len = TaggedLen::new(self.len(), true, Self::is_zst());
    }

    /// Sets the tag to be inline
    ///
    /// # Safety
    ///
    /// The active union member must be `self.raw.inline`
    #[inline]
    unsafe fn set_inline(&mut self) {
        self.len = TaggedLen::new(self.len(), false, Self::is_zst());
    }

    /// Sets the length of a vector.
    ///
    /// This will explicitly set the size of the vector, without actually modifying its buffers, so
    /// it is up to the caller to ensure that the vector is actually the specified size.
    ///
    /// # Safety
    ///
    /// `new_len <= self.capacity()` must be true, and all the elements in the range `..new_len`
    /// must be initialized.
    #[inline]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= self.capacity());
        let on_heap = self.len.on_heap(Self::is_zst());
        self.len = TaggedLen::new(new_len, on_heap, Self::is_zst());
    }

    #[inline]
    pub const fn inline_size() -> usize {
        if Self::is_zst() {
            usize::MAX
        } else {
            N
        }
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.len.value(Self::is_zst())
    }

    #[must_use]
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn capacity(&self) -> usize {
        if self.len.on_heap(Self::is_zst()) {
            // SAFETY: raw.heap is active
            unsafe { self.raw.heap.1 }
        } else {
            Self::inline_size()
        }
    }

    #[inline]
    pub const fn spilled(&self) -> bool {
        self.len.on_heap(Self::is_zst())
    }

    /// Splits the collection into two at the given index.
    ///
    /// Returns a newly allocated vector containing the elements in the range
    /// `[at, len)`. After the call, the original vector will be left containing
    /// the elements `[0, at)` with its previous capacity unchanged.
    ///
    /// - If you want to take ownership of the entire contents and capacity of
    ///   the vector, see [`core::mem::take`] or [`core::mem::replace`].
    /// - If you don't need the returned vector at all, see [`SmallVec::truncate`].
    /// - If you want to take ownership of an arbitrary subslice, or you don't
    ///   necessarily want to store the removed items in a vector, see [`SmallVec::drain`].
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3];
    /// let vec2 = vec.split_off(1);
    /// assert_eq!(vec, [1]);
    /// assert_eq!(vec2, [2, 3]);
    /// ```
    #[inline]
    pub fn split_off(&mut self, at: usize) -> Self {
        let len = self.len();
        assert!(at <= len);

        let other_len = len - at;
        let mut other = Self::with_capacity(other_len);

        // Unsafely `set_len` and copy items to `other`.
        unsafe {
            self.set_len(at);
            other.set_len(other_len);

            core::ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other_len);
        }
        other
    }

    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, N>
    where
        R: core::ops::RangeBounds<usize>,
    {
        use core::ops::Bound::*;

        let len = self.len();
        let start = match range.start_bound() {
            Included(&n) => n,
            Excluded(&n) => n.checked_add(1).expect("Range start out of bounds"),
            Unbounded => 0,
        };
        let end = match range.end_bound() {
            Included(&n) => n.checked_add(1).expect("Range end out of bounds"),
            Excluded(&n) => n,
            Unbounded => len,
        };

        assert!(start <= end);
        assert!(end <= len);

        unsafe {
            // SAFETY: `start <= len`
            self.set_len(start);

            // SAFETY: all the elements in `start..end` are initialized
            let range_slice = core::slice::from_raw_parts(self.as_ptr().add(start), end - start);

            // SAFETY: all the elements in `end..len` are initialized
            Drain {
                tail_start: end,
                tail_len: len - end,
                iter: range_slice.iter(),
                // Since self is a &mut, passing it to a function would invalidate the slice iterator.
                vec: core::ptr::NonNull::new_unchecked(self as *mut _),
            }
        }
    }

    #[cfg(feature = "extract_if")]
    /// Creates an iterator which uses a closure to determine if an element should be removed.
    ///
    /// If the closure returns true, the element is removed and yielded.
    /// If the closure returns false, the element will remain in the vector and will not be yielded
    /// by the iterator.
    ///
    /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
    /// or the iteration short-circuits, then the remaining elements will be retained.
    /// Use [`retain`] with a negated predicate if you do not need the returned iterator.
    ///
    /// [`retain`]: SmallVec::retain
    ///
    /// Using this method is equivalent to the following code:
    /// ```
    /// # use smallvec::SmallVec;
    /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
    /// # let mut vec: SmallVec<i32, 8> = SmallVec::from_slice(&[1i32, 2, 3, 4, 5, 6]);
    /// let mut i = 0;
    /// while i < vec.len() {
    ///     if some_predicate(&mut vec[i]) {
    ///         let val = vec.remove(i);
    ///         // your code here
    ///     } else {
    ///         i += 1;
    ///     }
    /// }
    ///
    /// # assert_eq!(vec, SmallVec::<i32, 8>::from_slice(&[1i32, 4, 5]));
    /// ```
    ///
    /// But `extract_if` is easier to use. `extract_if` is also more efficient,
    /// because it can backshift the elements of the array in bulk.
    ///
    /// Note that `extract_if` also lets you mutate every element in the filter closure,
    /// regardless of whether you choose to keep or remove it.
    ///
    /// # Examples
    ///
    /// Splitting an array into evens and odds, reusing the original allocation:
    ///
    /// ```
    /// # use smallvec::SmallVec;
    /// let mut numbers: SmallVec<i32, 16> = SmallVec::from_slice(&[1i32, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
    ///
    /// let evens = numbers.extract_if(|x| *x % 2 == 0).collect::<SmallVec<i32, 16>>();
    /// let odds = numbers;
    ///
    /// assert_eq!(evens, SmallVec::<i32, 16>::from_slice(&[2i32, 4, 6, 8, 14]));
    /// assert_eq!(odds, SmallVec::<i32, 16>::from_slice(&[1i32, 3, 5, 9, 11, 13, 15]));
    /// ```
    pub fn extract_if<F>(&mut self, filter: F) -> ExtractIf<'_, T, N, F>
    where
        F: FnMut(&mut T) -> bool,
    {
        let old_len = self.len();

        // Guard against us getting leaked (leak amplification)
        unsafe {
            self.set_len(0);
        }

        ExtractIf {
            vec: self,
            idx: 0,
            del: 0,
            old_len,
            pred: filter,
        }
    }

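    /// Appends `value` to the back of the vector, reserving more capacity
    /// first if the vector is full.
    ///
    /// A usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let mut v: SmallVec<i32, 2> = SmallVec::new();
    /// v.push(1);
    /// v.push(2);
    /// v.push(3); // the inline capacity is 2, so this push spills to the heap
    /// assert_eq!(v.pop(), Some(3));
    /// assert!(v.spilled());
    /// ```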
    #[inline]
    pub fn push(&mut self, value: T) {
        let len = self.len();
        if len == self.capacity() {
            self.reserve(1);
        }
        // SAFETY: both the input and output are within the allocation
        let ptr = unsafe { self.as_mut_ptr().add(len) };
        // SAFETY: we reserved more space above if there wasn't enough, so the address is valid
        // for writes.
        unsafe { ptr.write(value) };
        unsafe { self.set_len(len + 1) }
    }

    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.is_empty() {
            None
        } else {
            let len = self.len() - 1;
            // SAFETY: the subtraction can't overflow since the old length is nonzero, and
            // `len < old_len`
            unsafe { self.set_len(len) };
            // SAFETY: this element was initialized and we just gave up ownership of it, so we can
            // give it away
            let value = unsafe { self.as_mut_ptr().add(len).read() };
            Some(value)
        }
    }

    #[inline]
    pub fn append<const M: usize>(&mut self, other: &mut SmallVec<T, M>) {
        // can't overflow since both are smaller than isize::MAX and 2 * isize::MAX < usize::MAX
        let len = self.len();
        let other_len = other.len();
        let total_len = len + other_len;
        if total_len > self.capacity() {
            self.reserve(other_len);
        }

        // SAFETY: see `Self::push`
        let ptr = unsafe { self.as_mut_ptr().add(len) };
        unsafe { other.set_len(0) }
        // SAFETY: we have a mutable reference to each vector and each uniquely owns its memory,
        // so the ranges can't overlap
        unsafe { copy_nonoverlapping(other.as_ptr(), ptr, other_len) };
        unsafe { self.set_len(total_len) }
    }

    #[inline]
    pub fn grow(&mut self, new_capacity: usize) {
        infallible(self.try_grow(new_capacity));
    }

    #[cold]
    pub fn try_grow(&mut self, new_capacity: usize) -> Result<(), CollectionAllocErr> {
        if Self::is_zst() {
            return Ok(());
        }

        let len = self.len();
        assert!(new_capacity >= len);

        if new_capacity > Self::inline_size() {
            // SAFETY: we checked all the preconditions
            let result = unsafe { self.raw.try_grow_raw(self.len, new_capacity) };

            if result.is_ok() {
                // SAFETY: the allocation succeeded, so self.raw.heap is now active
                unsafe { self.set_on_heap() };
            }
            result
        } else {
            // new_capacity <= Self::inline_size()
            if self.spilled() {
                unsafe {
                    // SAFETY: heap member is active
                    let (ptr, old_cap) = self.raw.heap;
                    // inline member is now active

                    // SAFETY: len <= new_capacity <= Self::inline_size()
                    // so the copy is within bounds of the inline member
                    copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len);
                    drop(DropDealloc {
                        ptr: ptr.cast(),
                        size_bytes: old_cap * size_of::<T>(),
                        align: align_of::<T>(),
                    });
                    self.set_inline();
                }
            }
            Ok(())
        }
    }

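    /// Reserves capacity for at least `additional` more elements.
    ///
    /// The requested capacity is rounded up to the next power of two, so a
    /// sequence of pushes runs in amortized constant time. Panics if the
    /// capacity computation overflows; on allocation failure,
    /// `alloc::alloc::handle_alloc_error` is called.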
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        // can't overflow since len <= capacity
        if additional > self.capacity() - self.len() {
            let new_capacity = infallible(
                self.len()
                    .checked_add(additional)
                    .and_then(usize::checked_next_power_of_two)
                    .ok_or(CollectionAllocErr::CapacityOverflow),
            );
            self.grow(new_capacity);
        }
    }

    #[inline]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        if additional > self.capacity() - self.len() {
            let new_capacity = self
                .len()
                .checked_add(additional)
                .and_then(usize::checked_next_power_of_two)
                .ok_or(CollectionAllocErr::CapacityOverflow)?;
            self.try_grow(new_capacity)
        } else {
            Ok(())
        }
    }

    #[inline]
    pub fn reserve_exact(&mut self, additional: usize) {
        // can't overflow since len <= capacity
        if additional > self.capacity() - self.len() {
            let new_capacity = infallible(
                self.len()
                    .checked_add(additional)
                    .ok_or(CollectionAllocErr::CapacityOverflow),
            );
            self.grow(new_capacity);
        }
    }

    #[inline]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        if additional > self.capacity() - self.len() {
            let new_capacity = self
                .len()
                .checked_add(additional)
                .ok_or(CollectionAllocErr::CapacityOverflow)?;
            self.try_grow(new_capacity)
        } else {
            Ok(())
        }
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        if !self.spilled() {
            return;
        }
        let len = self.len();
        if len <= Self::inline_size() {
            // SAFETY: self.spilled() is true, so we're on the heap
            unsafe {
                let (ptr, capacity) = self.raw.heap;
                self.raw = RawSmallVec::new_inline(MaybeUninit::uninit());
                copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len);
                self.set_inline();
                alloc::alloc::dealloc(
                    ptr.cast().as_ptr(),
                    Layout::from_size_align_unchecked(capacity * size_of::<T>(), align_of::<T>()),
                );
            }
        } else if len < self.capacity() {
            // SAFETY: `len > Self::inline_size() >= 0`, so the new capacity (equal to `len`)
            // is nonzero.
            // `T` can't be a ZST, because a `SmallVec<ZST, N>` is never spilled.
            unsafe { infallible(self.raw.try_grow_raw(self.len, len)) };
        }
    }

    #[inline]
    pub fn truncate(&mut self, len: usize) {
        let old_len = self.len();
        if len < old_len {
            // SAFETY: we set `len` to a smaller value
            // then we drop the previously initialized elements
            unsafe {
                self.set_len(len);
                core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(
                    self.as_mut_ptr().add(len),
                    old_len - len,
                ))
            }
        }
    }

    #[inline]
    pub fn as_slice(&self) -> &[T] {
        let len = self.len();
        let ptr = self.as_ptr();
        // SAFETY: all the elements in `..len` are initialized
        unsafe { core::slice::from_raw_parts(ptr, len) }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = self.len();
        let ptr = self.as_mut_ptr();
        // SAFETY: see above
        unsafe { core::slice::from_raw_parts_mut(ptr, len) }
    }

    #[inline]
    pub fn swap_remove(&mut self, index: usize) -> T {
        let len = self.len();
        assert!(index < len);
        // This can't overflow since `len > index >= 0`
        let new_len = len - 1;
        unsafe {
            // SAFETY: we set len to a smaller value
            self.set_len(new_len);
            let ptr = self.as_mut_ptr();
            let last = ptr.add(new_len);
            let ith = ptr.add(index);
            // This item is initialized since it was in the vector just before
            let last_item = last.read();
            // This item is initialized since index < len
            let ith_item = ith.read();

            // Note that these may be the same element.
            // This is fine since in this case we just write it back to the pointer past the end of
            // the vector, so the vector no longer owns it
            ith.write(last_item);
            ith_item
        }
    }

    #[inline]
    pub fn clear(&mut self) {
        self.truncate(0);
    }

    #[inline]
    pub fn remove(&mut self, index: usize) -> T {
        let len = self.len();
        assert!(index < len);
        let new_len = len - 1;
        unsafe {
            // SAFETY: new_len < len
            self.set_len(new_len);
            let ptr = self.as_mut_ptr();
            let ith = ptr.add(index);
            // This item is initialized since index < len
            let ith_item = ith.read();
            copy(ith.add(1), ith, new_len - index);
            ith_item
        }
    }

    #[inline]
    pub fn insert(&mut self, index: usize, value: T) {
        let len = self.len();
        assert!(index <= len);
        self.reserve(1);
        let ptr = self.as_mut_ptr();
        unsafe {
            // the elements at `index + 1..len + 1` are now initialized
            if index < len {
                copy(ptr.add(index), ptr.add(index + 1), len - index);
            }
            // the element at `index` is now initialized
            ptr.add(index).write(value);

            // SAFETY: all the elements are initialized
            self.set_len(len + 1);
        }
    }

    fn insert_many_impl<I: Iterator<Item = T>>(&mut self, mut index: usize, iter: I) {
        let len = self.len();
        if index == len {
            return self.extend(iter);
        }

        let mut iter = iter.fuse();
        let (lower_bound, _) = iter.size_hint();
        self.reserve(lower_bound);

        let count = unsafe {
            let ptr = self.as_mut_ptr();
            // SAFETY: ptr is valid for `lower_bound` writes since we just reserved that much
            let count = insert_many_batch(ptr, index, lower_bound, len, &mut iter);
            // SAFETY: `insert_many_batch` returns the number of elements it initialized, and
            // leaves the vector in a valid state, without setting the new length
            self.set_len(len + count);
            count
        };

        index += count;
        iter.enumerate()
            .for_each(|(i, item)| self.insert(index + i, item));
    }

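    /// Inserts all elements yielded by `iterable` at position `index`,
    /// shifting the elements after it to the right.
    ///
    /// A usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let mut v: SmallVec<i32, 8> = SmallVec::from_slice(&[1, 4, 5]);
    /// v.insert_many(1, [2, 3]);
    /// assert_eq!(v.as_slice(), &[1, 2, 3, 4, 5]);
    /// ```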
    #[inline]
    pub fn insert_many<I: IntoIterator<Item = T>>(&mut self, index: usize, iterable: I) {
        self.insert_many_impl(index, iterable.into_iter());
    }

    #[inline]
    pub const fn as_ptr(&self) -> *const T {
        if self.len.on_heap(Self::is_zst()) {
            // SAFETY: heap member is active
            unsafe { self.raw.as_ptr_heap() }
        } else {
            self.raw.as_ptr_inline()
        }
    }

    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        if self.len.on_heap(Self::is_zst()) {
            // SAFETY: see above
            unsafe { self.raw.as_mut_ptr_heap() }
        } else {
            self.raw.as_mut_ptr_inline()
        }
    }

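    /// Converts the `SmallVec` into a `Vec`, reusing the heap allocation if
    /// the vector has already spilled.
    ///
    /// A usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let v: SmallVec<i32, 2> = SmallVec::from_slice(&[1, 2, 3]);
    /// assert!(v.spilled());
    /// let vec = v.into_vec();
    /// assert_eq!(vec, [1, 2, 3]);
    /// ```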
    #[inline]
    pub fn into_vec(self) -> Vec<T> {
        let len = self.len();
        if !self.spilled() {
            let mut vec = Vec::with_capacity(len);
            let this = ManuallyDrop::new(self);
            // SAFETY: we create a new vector with sufficient capacity, copy our elements into it
            // to transfer ownership and then set the length
            // we don't drop the elements we previously held
            unsafe {
                copy_nonoverlapping(this.raw.as_ptr_inline(), vec.as_mut_ptr(), len);
                vec.set_len(len);
            }
            vec
        } else {
            let this = ManuallyDrop::new(self);
            // SAFETY:
            // - `ptr` was created with the global allocator
            // - `ptr` was created with the appropriate alignment for `T`
            // - the allocation pointed to by ptr is exactly cap * sizeof(T)
            // - `len` is less than or equal to `cap`
            // - the first `len` entries are proper `T`-values
            // - the allocation is not larger than `isize::MAX`
            unsafe {
                let (ptr, cap) = this.raw.heap;
                Vec::from_raw_parts(ptr.as_ptr(), len, cap)
            }
        }
    }

    #[inline]
    pub fn into_boxed_slice(self) -> Box<[T]> {
        self.into_vec().into_boxed_slice()
    }

    #[inline]
    pub fn into_inner(self) -> Result<[T; N], Self> {
        if self.len() != N {
            Err(self)
        } else {
            // when `this` is dropped, the memory is released if it's on the heap.
            let mut this = self;
            // SAFETY: we release ownership of the elements we hold
            unsafe {
                this.set_len(0);
            }
            let ptr = this.as_ptr() as *const [T; N];
            // SAFETY: these elements are initialized since the length was `N`
            unsafe { Ok(ptr.read()) }
        }
    }

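    /// Retains only the elements for which `f` returns `true`, preserving
    /// their relative order.
    ///
    /// A usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let mut v: SmallVec<i32, 8> = SmallVec::from_slice(&[1, 2, 3, 4]);
    /// v.retain(|x| *x % 2 == 0);
    /// assert_eq!(v.as_slice(), &[2, 4]);
    /// ```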
    pub fn retain<F: FnMut(&mut T) -> bool>(&mut self, mut f: F) {
        let mut del = 0;
        let len = self.len();
        let ptr = self.as_mut_ptr();
        for i in 0..len {
            // SAFETY: all the pointers are in bounds
            // `i - del` never overflows since `del <= i` is a maintained invariant
            unsafe {
                if !f(&mut *ptr.add(i)) {
                    del += 1;
                } else if del > 0 {
                    core::ptr::swap(ptr.add(i), ptr.add(i - del));
                }
            }
        }
        self.truncate(len - del);
    }

    #[inline]
    pub fn retain_mut<F: FnMut(&mut T) -> bool>(&mut self, f: F) {
        self.retain(f)
    }

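    /// Removes consecutive repeated elements, keeping the first of each run.
    ///
    /// A usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let mut v: SmallVec<i32, 8> = SmallVec::from_slice(&[1, 1, 2, 2, 3, 1]);
    /// v.dedup();
    /// assert_eq!(v.as_slice(), &[1, 2, 3, 1]);
    /// ```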
    #[inline]
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }

    #[inline]
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq<K>,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }

    #[inline]
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        // See the implementation of Vec::dedup_by in the
        // standard library for an explanation of this algorithm.
        let len = self.len();
        if len <= 1 {
            return;
        }

        let ptr = self.as_mut_ptr();
        let mut w: usize = 1;

        unsafe {
            for r in 1..len {
                let p_r = ptr.add(r);
                let p_wm1 = ptr.add(w - 1);
                if !same_bucket(&mut *p_r, &mut *p_wm1) {
                    if r != w {
                        let p_w = p_wm1.add(1);
                        core::ptr::swap(p_r, p_w);
                    }
                    w += 1;
                }
            }
        }

        self.truncate(w);
    }

    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
    where
        F: FnMut() -> T,
    {
        let old_len = self.len();
        if old_len < new_len {
            let mut f = f;
            let additional = new_len - old_len;
            self.reserve(additional);
            for _ in 0..additional {
                self.push(f());
            }
        } else if old_len > new_len {
            self.truncate(new_len);
        }
    }

    /// Returns the remaining spare capacity of the vector as a slice of
    /// `MaybeUninit<T>`.
    ///
    /// The returned slice can be used to fill the vector with data (e.g. by
    /// reading from a file) before marking the data as initialized using the
    /// [`set_len`](Self::set_len) method.
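    ///
    /// A sketch of that pattern:
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let mut v: SmallVec<u8, 8> = SmallVec::new();
    /// let spare = v.spare_capacity_mut();
    /// spare[0].write(1);
    /// spare[1].write(2);
    /// // SAFETY: the first two elements were just initialized.
    /// unsafe { v.set_len(2) };
    /// assert_eq!(v.as_slice(), &[1, 2]);
    /// ```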
    #[inline]
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        unsafe {
            core::slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len()) as *mut MaybeUninit<T>,
                self.capacity() - self.len(),
            )
        }
    }

    /// Creates a `SmallVec` directly from the raw components of another `SmallVec`.
    ///
    /// # Safety
    ///
    /// This is highly unsafe, due to the number of invariants that aren’t checked:
    ///
    /// - `ptr` needs to have been previously allocated via `SmallVec` from its spilled storage (at least, it’s highly likely to be incorrect if it wasn’t).
    /// - `T` needs to have the same size and alignment as the type `ptr` was allocated with.
    /// - `length` needs to be less than or equal to `capacity`.
    /// - `capacity` needs to be the capacity that the pointer was allocated with.
    ///
    /// Violating these may cause problems like corrupting the allocator’s internal data structures.
    ///
    /// Additionally, `capacity` must be greater than the inline capacity `N`; that is, the new `SmallVec` must need to spill over into heap allocated storage.
    ///
    /// The ownership of `ptr` is effectively transferred to the `SmallVec` which may then deallocate, reallocate or change the contents of memory pointed to by the pointer at will. Ensure that nothing else uses the pointer after calling this function.
    ///
    /// # Examples
    ///
    /// ```
    /// use smallvec::{SmallVec, smallvec};
    ///
    /// let mut v: SmallVec<_, 1> = smallvec![1, 2, 3];
    ///
    /// // Pull out the important parts of `v`.
    /// let p = v.as_mut_ptr();
    /// let len = v.len();
    /// let cap = v.capacity();
    /// let spilled = v.spilled();
    ///
    /// unsafe {
    ///     // Forget all about `v`. The heap allocation that stored the
    ///     // three values won't be deallocated.
    ///     std::mem::forget(v);
    ///
    ///     // Overwrite memory with [4, 5, 6].
    ///     //
    ///     // This is only safe if `spilled` is true! Otherwise, we are
    ///     // writing into the old `SmallVec`'s inline storage on the
    ///     // stack.
    ///     assert!(spilled);
    ///     for i in 0..len {
    ///         std::ptr::write(p.add(i), 4 + i);
    ///     }
    ///
    ///     // Put everything back together into a SmallVec with a different
    ///     // amount of inline storage, but which is still less than `cap`.
    ///     let rebuilt = SmallVec::<_, 2>::from_raw_parts(p, len, cap);
    ///     assert_eq!(&*rebuilt, &[4, 5, 6]);
    /// }
    /// ```
    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> SmallVec<T, N> {
        assert!(!Self::is_zst());

        // SAFETY: We require the caller to provide the same pointer that we allocated,
        // and we never allocate a null pointer.
        let ptr = unsafe {
            debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer.");
            NonNull::new_unchecked(ptr)
        };

        SmallVec {
            len: TaggedLen::new(length, true, Self::is_zst()),
            raw: RawSmallVec::new_heap(ptr, capacity),
            _marker: PhantomData,
        }
    }

    fn extend_impl<I: Iterator<Item = T>>(&mut self, iter: I) {
        let mut iter = iter.fuse();
        let (lower_bound, _) = iter.size_hint();
        self.reserve(lower_bound);
        let mut capacity = self.capacity();
        let mut ptr = self.as_mut_ptr();
        unsafe {
            loop {
                let mut len = self.len();
                // SAFETY: ptr is valid for `capacity - len` writes
                ptr = ptr.add(len);
                let mut guard = DropGuard { ptr, len: 0 };
                iter.by_ref().take(capacity - len).for_each(|item| {
                    ptr.add(guard.len).write(item);
                    guard.len += 1;
                });
                len += guard.len;
                core::mem::forget(guard);
                self.set_len(len);
                // At this point we either consumed all capacity or the iterator is exhausted (fused)
                if let Some(item) = iter.next() {
                    self.push(item);
                } else {
                    return;
                }
                // SAFETY: The push above would have spilled it
                let (heap_ptr, heap_capacity) = self.raw.heap;
                ptr = heap_ptr.as_ptr();
                capacity = heap_capacity;
            }
        }
    }
}

impl<T, const N: usize> Default for SmallVec<T, N> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<T: Copy, const N: usize> SmallVec<T, N> {
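    /// Copies the elements of `slice` into a new `SmallVec`. This is only
    /// available for `Copy` element types, where a bitwise copy is valid.
    ///
    /// A usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let v: SmallVec<i32, 4> = SmallVec::from_slice(&[1, 2, 3]);
    /// assert_eq!(v.as_slice(), &[1, 2, 3]);
    /// ```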
    #[inline]
    pub fn from_slice(slice: &[T]) -> Self {
        let len = slice.len();
        if len <= Self::inline_size() {
            let mut this = Self::new();
            unsafe {
                let ptr = this.raw.as_mut_ptr_inline();
                copy_nonoverlapping(slice.as_ptr(), ptr, len);
                this.set_len(len);
            }
            this
        } else {
            let mut this = Vec::with_capacity(len);
            unsafe {
                let ptr = this.as_mut_ptr();
                copy_nonoverlapping(slice.as_ptr(), ptr, len);
                this.set_len(len);
            }
            Self::from_vec(this)
        }
    }

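    /// Copies `slice` into the vector at position `index`, shifting the
    /// elements `index..len` to the right. Panics if `index > len`.
    /// A minimal usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    /// let mut v: SmallVec<i32, 8> = SmallVec::from_slice(&[1, 2, 3]);
    /// v.insert_from_slice(1, &[4, 5]);
    /// assert_eq!(v.as_slice(), &[1, 4, 5, 2, 3]);
    /// ```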
    #[inline]
    pub fn insert_from_slice(&mut self, index: usize, slice: &[T]) {
        let len = self.len();
        let other_len = slice.len();
        assert!(index <= len);
        self.reserve(other_len);
        unsafe {
            let base_ptr = self.as_mut_ptr();
            let ith_ptr = base_ptr.add(index);
            let shifted_ptr = base_ptr.add(index + other_len);
            // elements at `index + other_len..len + other_len` are now initialized
            copy(ith_ptr, shifted_ptr, len - index);
            // elements at `index..index + other_len` are now initialized
            copy_nonoverlapping(slice.as_ptr(), ith_ptr, other_len);

            // SAFETY: all the elements are initialized
            self.set_len(len + other_len);
        }
    }

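    /// Appends all elements of `slice` to the vector with a single `memcpy`.
    /// A minimal usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    /// let mut v: SmallVec<i32, 4> = SmallVec::from_slice(&[1, 2]);
    /// v.extend_from_slice(&[3, 4]);
    /// assert_eq!(v.as_slice(), &[1, 2, 3, 4]);
    /// ```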
    #[inline]
    pub fn extend_from_slice(&mut self, slice: &[T]) {
        let len = self.len();
        let other_len = slice.len();
        self.reserve(other_len);
        // SAFETY: see above
        unsafe {
            let base_ptr = self.as_mut_ptr();
            let end_ptr = base_ptr.add(len);
            copy_nonoverlapping(slice.as_ptr(), end_ptr, other_len);
            self.set_len(len + other_len);
        }
    }
}

impl<T: Clone, const N: usize> SmallVec<T, N> {
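    /// Resizes the vector so that its length equals `len`: extra slots are
    /// filled with clones of `value`, and excess elements are truncated.
    /// A minimal usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    /// let mut v: SmallVec<i32, 4> = SmallVec::from_slice(&[1, 2, 3]);
    /// v.resize(5, 0);
    /// assert_eq!(v.as_slice(), &[1, 2, 3, 0, 0]);
    /// v.resize(2, 0);
    /// assert_eq!(v.as_slice(), &[1, 2]);
    /// ```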
    #[inline]
    pub fn resize(&mut self, len: usize, value: T) {
        let old_len = self.len();
        if len > old_len {
            self.extend(core::iter::repeat(value).take(len - old_len));
        } else {
            self.truncate(len);
        }
    }

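    /// Creates a vector holding `n` clones of `elem`; this is what
    /// `smallvec![elem; n]` expands to. A minimal usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    /// let v: SmallVec<i32, 4> = SmallVec::from_elem(7, 3);
    /// assert_eq!(v.as_slice(), &[7, 7, 7]);
    /// ```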
    #[inline]
    pub fn from_elem(elem: T, n: usize) -> Self {
        if n > Self::inline_size() {
            Self::from_vec(vec![elem; n])
        } else {
            let mut v = Self::new();

            unsafe {
                let ptr = v.raw.as_mut_ptr_inline();
                let mut guard = DropGuard { ptr, len: 0 };

                // SAFETY: `n <= Self::inline_size()` so we can write `n` elements
                for i in 0..n {
                    guard.len = i;
                    ptr.add(i).write(elem.clone());
                }
                core::mem::forget(guard);
                // SAFETY: we just initialized `n` elements in the vector
                v.set_len(n);
            }
            v
        }
    }
}

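// Panic guard for batch insertion in the middle of the vector: dropping it
// drops the `len` initialized elements at `ptr`, then moves the displaced
// tail (`shifted_len` elements at `shifted_ptr`) back into place, leaving
// the vector in a valid state.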
struct DropShiftGuard<T> {
    ptr: *mut T,
    len: usize,
    shifted_ptr: *const T,
    shifted_len: usize,
}
impl<T> Drop for DropShiftGuard<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            core::ptr::slice_from_raw_parts_mut(self.ptr, self.len).drop_in_place();
            copy(self.shifted_ptr, self.ptr, self.shifted_len);
        }
    }
}

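// Panic guard for batch writes at the end of the vector: dropping it drops
// the `len` elements initialized so far at `ptr` so they are not leaked.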
struct DropGuard<T> {
    ptr: *mut T,
    len: usize,
}
impl<T> Drop for DropGuard<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            core::ptr::slice_from_raw_parts_mut(self.ptr, self.len).drop_in_place();
        }
    }
}

// Safety:
//
// `ptr..ptr + lower_bound` must be valid for writes
#[inline]
unsafe fn insert_many_batch<T, I: Iterator<Item = T>>(
    ptr: *mut T,
    index: usize,
    lower_bound: usize,
    len: usize,
    iter: &mut I,
) -> usize {
    // shift elements to the right to make space for the initial elements from the iterator
    copy(ptr.add(index), ptr.add(index + lower_bound), len - index);
    let ptr_ith = ptr.add(index);
    let mut guard = DropShiftGuard {
        ptr: ptr_ith,
        len: 0,
        shifted_ptr: ptr_ith.add(lower_bound),
        shifted_len: len - index,
    };
    iter.take(lower_bound).enumerate().for_each(|(i, item)| {
        ptr_ith.add(i).write(item);
        guard.len = i + 1;
    });
    let count = guard.len;
    core::mem::forget(guard);

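    // The iterator yielded fewer than `lower_bound` items; shift the tail
    // back to close the gap so the elements stay contiguous.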
    if count < lower_bound {
        copy(ptr_ith.add(lower_bound), ptr_ith.add(count), len - index);
    }
    count
}

impl<T, const N: usize> Extend<T> for SmallVec<T, N> {
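    /// Appends every item yielded by `iterable`. A minimal usage sketch:
    ///
    /// ```
    /// use smallvec::SmallVec;
    /// let mut v: SmallVec<i32, 4> = SmallVec::new();
    /// v.extend(0..3);
    /// assert_eq!(v.as_slice(), &[0, 1, 2]);
    /// ```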
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iterable: I) {
        self.extend_impl(iterable.into_iter());
    }
}

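// Deallocates a heap buffer when dropped. Constructed before the elements
// are dropped so the buffer is released even if an element's `Drop` panics.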
struct DropDealloc {
    ptr: NonNull<u8>,
    size_bytes: usize,
    align: usize,
}

impl Drop for DropDealloc {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            if self.size_bytes > 0 {
                alloc::alloc::dealloc(
                    self.ptr.as_ptr(),
                    Layout::from_size_align_unchecked(self.size_bytes, self.align),
                );
            }
        }
    }
}

#[cfg(feature = "may_dangle")]
unsafe impl<#[may_dangle] T, const N: usize> Drop for SmallVec<T, N> {
    fn drop(&mut self) {
        let on_heap = self.spilled();
        let len = self.len();
        let ptr = self.as_mut_ptr();
        // SAFETY: we first drop the elements, then `_drop_dealloc` is dropped, releasing memory we
        // used to own
        unsafe {
            let _drop_dealloc = if on_heap {
                let capacity = self.capacity();
                Some(DropDealloc {
                    ptr: NonNull::new_unchecked(ptr as *mut u8),
                    size_bytes: capacity * size_of::<T>(),
                    align: align_of::<T>(),
                })
            } else {
                None
            };
            core::ptr::slice_from_raw_parts_mut(ptr, len).drop_in_place();
        }
    }
}

#[cfg(not(feature = "may_dangle"))]
impl<T, const N: usize> Drop for SmallVec<T, N> {
    fn drop(&mut self) {
        let on_heap = self.spilled();
        let len = self.len();
        let ptr = self.as_mut_ptr();
        // SAFETY: see above
        unsafe {
            let _drop_dealloc = if on_heap {
                let capacity = self.capacity();
                Some(DropDealloc {
                    ptr: NonNull::new_unchecked(ptr as *mut u8),
                    size_bytes: capacity * size_of::<T>(),
                    align: align_of::<T>(),
                })
            } else {
                None
            };
            core::ptr::slice_from_raw_parts_mut(ptr, len).drop_in_place();
        }
    }
}

impl<T, const N: usize> Drop for IntoIter<T, N> {
    fn drop(&mut self) {
        // SAFETY: see above
        unsafe {
            let is_zst = size_of::<T>() == 0;
            let on_heap = self.end.on_heap(is_zst);
            let begin = self.begin;
            let end = self.end.value(is_zst);
            let ptr = self.as_mut_ptr();
            let _drop_dealloc = if on_heap {
                let capacity = self.raw.heap.1;
                Some(DropDealloc {
                    ptr: NonNull::new_unchecked(ptr as *mut u8),
                    size_bytes: capacity * size_of::<T>(),
                    align: align_of::<T>(),
                })
            } else {
                None
            };
            core::ptr::slice_from_raw_parts_mut(ptr.add(begin), end - begin).drop_in_place();
        }
    }
}

impl<T, const N: usize> core::ops::Deref for SmallVec<T, N> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T, const N: usize> core::ops::DerefMut for SmallVec<T, N> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T, const N: usize> core::iter::FromIterator<T> for SmallVec<T, N> {
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iterable: I) -> Self {
        let mut vec = Self::new();
        vec.extend_impl(iterable.into_iter());
        vec
    }
}

#[cfg(feature = "specialization")]
trait SpecFrom {
    type Element;
    fn spec_from(slice: &[Self::Element]) -> Self;
}

#[cfg(feature = "specialization")]
impl<T: Clone, const N: usize> SpecFrom for SmallVec<T, N> {
    type Element = T;

    default fn spec_from(slice: &[Self::Element]) -> Self {
        slice.iter().cloned().collect()
    }
}

#[cfg(feature = "specialization")]
impl<T: Copy, const N: usize> SpecFrom for SmallVec<T, N> {
    fn spec_from(slice: &[Self::Element]) -> Self {
        Self::from_slice(slice)
    }
}

#[cfg(feature = "specialization")]
impl<'a, T: Clone, const N: usize> From<&'a [T]> for SmallVec<T, N> {
    fn from(slice: &'a [T]) -> Self {
        <Self as SpecFrom>::spec_from(slice)
    }
}

#[cfg(not(feature = "specialization"))]
impl<'a, T: Clone, const N: usize> From<&'a [T]> for SmallVec<T, N> {
    fn from(slice: &'a [T]) -> Self {
        slice.iter().cloned().collect()
    }
}

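/// Converting from an array moves its elements: they are stored inline when
/// `M <= N` and moved to the heap (via `Vec`) otherwise. A minimal sketch:
///
/// ```
/// use smallvec::SmallVec;
/// let v = SmallVec::<i32, 4>::from([1, 2, 3]);
/// assert!(!v.spilled());
/// assert_eq!(v.as_slice(), &[1, 2, 3]);
/// ```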
impl<T, const N: usize, const M: usize> From<[T; M]> for SmallVec<T, N> {
    fn from(array: [T; M]) -> Self {
        if M > N {
            // If M > N, we'd have to heap allocate anyway,
            // so delegate to Vec for the allocation
            Self::from(Vec::from(array))
        } else {
            // M <= N
            let mut this = Self::new();
            debug_assert!(M <= this.capacity());
            let array = ManuallyDrop::new(array);
            // SAFETY: M <= this.capacity()
            unsafe {
                copy_nonoverlapping(array.as_ptr(), this.as_mut_ptr(), M);
                this.set_len(M);
            }
            this
        }
    }
}
impl<T, const N: usize> From<Vec<T>> for SmallVec<T, N> {
    fn from(vec: Vec<T>) -> Self {
        Self::from_vec(vec)
    }
}

impl<T: Clone, const N: usize> Clone for SmallVec<T, N> {
    #[inline]
    fn clone(&self) -> SmallVec<T, N> {
        SmallVec::from(self.as_slice())
    }

    fn clone_from(&mut self, source: &Self) {
        // Inspired by `impl Clone for Vec`.

        // drop anything that will not be overwritten
        self.truncate(source.len());

        // SAFETY: self.len() <= source.len() due to the truncate above, so the
        // slices here are always in-bounds.
        let init = unsafe { source.get_unchecked(..self.len()) };
        let tail = unsafe { source.get_unchecked(self.len()..) };

        // reuse the contained values' allocations/resources.
        self.clone_from_slice(init);
        self.extend(tail.iter().cloned());
    }
}

impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
    #[inline]
    fn clone(&self) -> IntoIter<T, N> {
        SmallVec::from(self.as_slice()).into_iter()
    }
}

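/// Creates a [`SmallVec`] containing the given elements, analogous to `vec!`.
///
/// Supports both a list of elements and the `elem; n` repeat form (which
/// requires `elem: Clone`). A minimal usage sketch:
///
/// ```
/// use smallvec::{smallvec, SmallVec};
/// let v: SmallVec<i32, 4> = smallvec![1, 2, 3];
/// assert_eq!(v.as_slice(), &[1, 2, 3]);
/// let w: SmallVec<i32, 4> = smallvec![0; 4];
/// assert_eq!(w.as_slice(), &[0, 0, 0, 0]);
/// ```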
#[macro_export]
macro_rules! smallvec {
    // count helper: transform any expression into 1
    (@one $x:expr) => (1usize);
    ($elem:expr; $n:expr) => ({
        $crate::SmallVec::from_elem($elem, $n)
    });
    ($($x:expr),*$(,)?) => ({
        let count = 0usize $(+ $crate::smallvec!(@one $x))*;
        #[allow(unused_mut)]
        let mut vec = $crate::SmallVec::new();
        if count <= vec.capacity() {
            $(vec.push($x);)*
            vec
        } else {
            $crate::SmallVec::from_vec($crate::alloc::vec![$($x,)*])
        }
    });
}

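/// Like [`smallvec!`], but the inline capacity `N` is inferred from the
/// number of elements, so the result is guaranteed not to allocate.
/// A minimal usage sketch:
///
/// ```
/// use smallvec::{smallvec_inline, SmallVec};
/// let v = smallvec_inline![1, 2, 3]; // SmallVec<i32, 3>
/// assert_eq!(v.as_slice(), &[1, 2, 3]);
/// assert!(!v.spilled());
/// ```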
#[macro_export]
macro_rules! smallvec_inline {
    // count helper: transform any expression into 1
    (@one $x:expr) => (1usize);
    ($elem:expr; $n:expr) => ({
        $crate::SmallVec::<_, $n>::from_buf([$elem; $n])
    });
    ($($x:expr),+ $(,)?) => ({
        const N: usize = 0usize $(+ $crate::smallvec_inline!(@one $x))*;
        $crate::SmallVec::<_, N>::from_buf([$($x,)*])
    });
}

impl<T, const N: usize> IntoIterator for SmallVec<T, N> {
    type IntoIter = IntoIter<T, N>;
    type Item = T;
    fn into_iter(self) -> Self::IntoIter {
        // SAFETY: we move out of this.raw by reading the value at its address, which is fine since
        // we don't drop it
        unsafe {
            // Wrap the SmallVec in `ManuallyDrop` so its destructor never runs;
            // `IntoIter`'s drop handles dropping the elements instead
            let this = ManuallyDrop::new(self);
            IntoIter {
                raw: (&this.raw as *const RawSmallVec<T, N>).read(),
                begin: 0,
                end: this.len,
                _marker: PhantomData,
            }
        }
    }
}

impl<'a, T, const N: usize> IntoIterator for &'a SmallVec<T, N> {
    type IntoIter = core::slice::Iter<'a, T>;
    type Item = &'a T;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T, const N: usize> IntoIterator for &'a mut SmallVec<T, N> {
    type IntoIter = core::slice::IterMut<'a, T>;
    type Item = &'a mut T;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T, U, const N: usize, const M: usize> PartialEq<SmallVec<U, M>> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &SmallVec<U, M>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}
impl<T, const N: usize> Eq for SmallVec<T, N> where T: Eq {}

impl<T, U, const N: usize, const M: usize> PartialEq<[U; M]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &[U; M]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize, const M: usize> PartialEq<&[U; M]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&[U; M]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize> PartialEq<[U]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize> PartialEq<&[U]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize> PartialEq<&mut [U]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        self[..] == other[..]
    }
}

impl<T, const N: usize> PartialOrd for SmallVec<T, N>
where
    T: PartialOrd,
{
    #[inline]
    fn partial_cmp(&self, other: &SmallVec<T, N>) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<T, const N: usize> Ord for SmallVec<T, N>
where
    T: Ord,
{
    #[inline]
    fn cmp(&self, other: &SmallVec<T, N>) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T: Hash, const N: usize> Hash for SmallVec<T, N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state)
    }
}

impl<T, const N: usize> Borrow<[T]> for SmallVec<T, N> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, const N: usize> BorrowMut<[T]> for SmallVec<T, N> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T, const N: usize> AsRef<[T]> for SmallVec<T, N> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, const N: usize> AsMut<[T]> for SmallVec<T, N> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T: Debug, const N: usize> Debug for SmallVec<T, N> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<T: Debug, const N: usize> Debug for IntoIter<T, N> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T: Debug, const N: usize> Debug for Drain<'_, T, N> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<T, const N: usize> Serialize for SmallVec<T, N>
where
    T: Serialize,
{
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let mut state = serializer.serialize_seq(Some(self.len()))?;
        for item in self {
            state.serialize_element(item)?;
        }
        state.end()
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de, T, const N: usize> Deserialize<'de> for SmallVec<T, N>
where
    T: Deserialize<'de>,
{
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        deserializer.deserialize_seq(SmallVecVisitor {
            phantom: PhantomData,
        })
    }
}

#[cfg(feature = "serde")]
struct SmallVecVisitor<T, const N: usize> {
    phantom: PhantomData<T>,
}

#[cfg(feature = "serde")]
impl<'de, T, const N: usize> Visitor<'de> for SmallVecVisitor<T, N>
where
    T: Deserialize<'de>,
{
    type Value = SmallVec<T, N>;

    fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        formatter.write_str("a sequence")
    }

    fn visit_seq<B>(self, mut seq: B) -> Result<Self::Value, B::Error>
    where
        B: SeqAccess<'de>,
    {
        use serde::de::Error;
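        // `size_hint` is advisory and, for untrusted input, attacker
        // controlled, so reserve with `try_reserve` and surface allocation
        // failure as a deserialization error instead of aborting.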
        let len = seq.size_hint().unwrap_or(0);
        let mut values = SmallVec::new();
        values.try_reserve(len).map_err(B::Error::custom)?;

        while let Some(value) = seq.next_element()? {
            values.push(value);
        }

        Ok(values)
    }
}

#[cfg(feature = "malloc_size_of")]
impl<T, const N: usize> MallocShallowSizeOf for SmallVec<T, N> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.spilled() {
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        } else {
            0
        }
    }
}

#[cfg(feature = "malloc_size_of")]
impl<T: MallocSizeOf, const N: usize> MallocSizeOf for SmallVec<T, N> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        for elem in self.iter() {
            n += elem.size_of(ops);
        }
        n
    }
}

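// With the `std` feature enabled, `SmallVec<u8, N>` can be used as an
// `io::Write` sink. Writes only append to the buffer and cannot fail.
// A minimal sketch (assumes the `std` feature):
//
//     use std::io::Write;
//     use smallvec::SmallVec;
//
//     let mut buf: SmallVec<u8, 8> = SmallVec::new();
//     buf.write_all(b"hello").unwrap();
//     assert_eq!(buf.as_slice(), b"hello");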
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl<const N: usize> io::Write for SmallVec<u8, N> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

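// With the `bytes` feature enabled, `SmallVec<u8, N>` implements
// `bytes::BufMut`, so it can be filled through the `put_*` family.
// A minimal sketch (assumes the `bytes` feature):
//
//     use bytes::BufMut;
//     use smallvec::SmallVec;
//
//     let mut buf: SmallVec<u8, 8> = SmallVec::new();
//     buf.put_slice(b"abc");
//     buf.put_u8(b'!');
//     assert_eq!(buf.as_slice(), b"abc!");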
#[cfg(feature = "bytes")]
unsafe impl<const N: usize> BufMut for SmallVec<u8, N> {
    #[inline]
    fn remaining_mut(&self) -> usize {
        // A vector can never have more than isize::MAX bytes
        isize::MAX as usize - self.len()
    }

    #[inline]
    unsafe fn advance_mut(&mut self, cnt: usize) {
        let len = self.len();
        let remaining = self.capacity() - len;

        if remaining < cnt {
            panic!("advance out of bounds: the remaining capacity is {remaining} but advancing by {cnt}");
        }

        // Addition will not overflow since the sum is at most the capacity.
        self.set_len(len + cnt);
    }

    #[inline]
    fn chunk_mut(&mut self) -> &mut UninitSlice {
        if self.capacity() == self.len() {
            self.reserve(64); // Grow the smallvec
        }

        let cap = self.capacity();
        let len = self.len();

        let ptr = self.as_mut_ptr();
        // SAFETY: Since `ptr` is valid for `cap` bytes, `ptr.add(len)` must be
        // valid for `cap - len` bytes. The subtraction will not underflow since
        // `len <= cap`.
        unsafe { UninitSlice::from_raw_parts_mut(ptr.add(len), cap - len) }
    }

    // Specialize these methods so they can skip checking `remaining_mut`
    // and `advance_mut`.
    #[inline]
    fn put<T: bytes::Buf>(&mut self, mut src: T)
    where
        Self: Sized,
    {
        // In case the src isn't contiguous, reserve upfront.
        self.reserve(src.remaining());

        while src.has_remaining() {
            let s = src.chunk();
            let l = s.len();
            self.extend_from_slice(s);
            src.advance(l);
        }
    }

    #[inline]
    fn put_slice(&mut self, src: &[u8]) {
        self.extend_from_slice(src);
    }

    #[inline]
    fn put_bytes(&mut self, val: u8, cnt: usize) {
        // If the addition would overflow, `new_len` saturates at `usize::MAX`
        // and the `resize` below will panic on capacity overflow.
        let new_len = self.len().saturating_add(cnt);
        self.resize(new_len, val);
    }
}