#![no_std]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(feature = "specialization", allow(incomplete_features))]
#![cfg_attr(feature = "specialization", feature(specialization))]
#![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]

#[doc(hidden)]
pub extern crate alloc;

#[cfg(any(test, feature = "std"))]
extern crate std;

#[cfg(test)]
mod tests;

use alloc::boxed::Box;
use alloc::vec;
use alloc::vec::Vec;

use alloc::alloc::Layout;
use core::borrow::Borrow;
use core::borrow::BorrowMut;
use core::fmt::Debug;
use core::hash::{Hash, Hasher};
use core::marker::PhantomData;
use core::mem::align_of;
use core::mem::size_of;
use core::mem::ManuallyDrop;
use core::mem::MaybeUninit;
use core::ptr::addr_of;
use core::ptr::addr_of_mut;
use core::ptr::copy;
use core::ptr::copy_nonoverlapping;
use core::ptr::NonNull;

#[cfg(feature = "bytes")]
use bytes::{buf::UninitSlice, BufMut};
#[cfg(feature = "malloc_size_of")]
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
#[cfg(feature = "serde")]
use serde::{
    de::{Deserialize, Deserializer, SeqAccess, Visitor},
    ser::{Serialize, SerializeSeq, Serializer},
};
#[cfg(feature = "std")]
use std::io;

#[derive(Debug)]
pub enum CollectionAllocErr {
    CapacityOverflow,
    AllocErr {
        layout: Layout,
    },
}
impl core::fmt::Display for CollectionAllocErr {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "Allocation error: {:?}", self)
    }
}

#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl std::error::Error for CollectionAllocErr {}

#[repr(C)]
pub union RawSmallVec<T, const N: usize> {
    inline: ManuallyDrop<MaybeUninit<[T; N]>>,
    heap: (NonNull<T>, usize),
}
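
// Which variant of the union is active is not stored in the union itself: the
// low bit packed into `SmallVec::len` (see `TaggedLen` below) records whether
// the elements currently live in `inline` or behind the `heap` pointer.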

#[inline]
fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T {
    match result {
        Ok(x) => x,
        Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
        Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout),
    }
}

impl<T, const N: usize> RawSmallVec<T, N> {
    #[inline]
    const fn is_zst() -> bool {
        size_of::<T>() == 0
    }

    #[inline]
    const fn new() -> Self {
        Self::new_inline(MaybeUninit::uninit())
    }
    #[inline]
    const fn new_inline(inline: MaybeUninit<[T; N]>) -> Self {
        Self {
            inline: ManuallyDrop::new(inline),
        }
    }
    #[inline]
    const fn new_heap(ptr: NonNull<T>, capacity: usize) -> Self {
        Self {
            heap: (ptr, capacity),
        }
    }

    #[inline]
    const fn as_ptr_inline(&self) -> *const T {
        (unsafe { addr_of!(self.inline) }) as *const T
    }

    #[inline]
    fn as_mut_ptr_inline(&mut self) -> *mut T {
        (unsafe { addr_of_mut!(self.inline) }) as *mut T
    }

    #[inline]
    const unsafe fn as_ptr_heap(&self) -> *const T {
        self.heap.0.as_ptr()
    }

    #[inline]
    unsafe fn as_mut_ptr_heap(&mut self) -> *mut T {
        self.heap.0.as_ptr()
    }

    unsafe fn try_grow_raw(
        &mut self,
        len: TaggedLen,
        new_capacity: usize,
    ) -> Result<(), CollectionAllocErr> {
        use alloc::alloc::{alloc, realloc};
        debug_assert!(!Self::is_zst());
        debug_assert!(new_capacity > 0);
        debug_assert!(new_capacity >= len.value(Self::is_zst()));

        let was_on_heap = len.on_heap(Self::is_zst());
        let ptr = if was_on_heap {
            self.as_mut_ptr_heap()
        } else {
            self.as_mut_ptr_inline()
        };
        let len = len.value(Self::is_zst());

        let new_layout =
            Layout::array::<T>(new_capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?;
        if new_layout.size() > isize::MAX as usize {
            return Err(CollectionAllocErr::CapacityOverflow);
        }

        let new_ptr = if len == 0 || !was_on_heap {
            // Fresh allocation; copy over the `len` existing elements (if any).
            let new_ptr = alloc(new_layout) as *mut T;
            let new_ptr =
                NonNull::new(new_ptr).ok_or(CollectionAllocErr::AllocErr { layout: new_layout })?;
            copy_nonoverlapping(ptr, new_ptr.as_ptr(), len);
            new_ptr
        } else {
            // Already on the heap and non-empty: reallocate the existing buffer.
            let old_layout =
                Layout::from_size_align_unchecked(self.heap.1 * size_of::<T>(), align_of::<T>());

            let new_ptr = realloc(ptr as *mut u8, old_layout, new_layout.size()) as *mut T;
            NonNull::new(new_ptr).ok_or(CollectionAllocErr::AllocErr { layout: new_layout })?
        };
        *self = Self::new_heap(new_ptr, new_capacity);
        Ok(())
    }
}

#[repr(transparent)]
#[derive(Clone, Copy)]
struct TaggedLen(usize);

impl TaggedLen {
    #[inline]
    pub const fn new(len: usize, on_heap: bool, is_zst: bool) -> Self {
        if is_zst {
            debug_assert!(!on_heap);
            TaggedLen(len)
        } else {
            debug_assert!(len < isize::MAX as usize);
            TaggedLen((len << 1) | on_heap as usize)
        }
    }

    #[inline]
    #[must_use]
    pub const fn on_heap(self, is_zst: bool) -> bool {
        if is_zst {
            false
        } else {
            (self.0 & 1_usize) == 1
        }
    }

    #[inline]
    pub const fn value(self, is_zst: bool) -> usize {
        if is_zst {
            self.0
        } else {
            self.0 >> 1
        }
    }
}
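
// A minimal illustrative check (sketch) of the `TaggedLen` encoding for a
// non-zero-sized element type: the length lives in the upper bits and the
// lowest bit is the "spilled to the heap" flag.
#[cfg(test)]
#[test]
fn tagged_len_roundtrip_sketch() {
    let heap = TaggedLen::new(3, true, false);
    assert_eq!(heap.value(false), 3);
    assert!(heap.on_heap(false));

    let inline = TaggedLen::new(3, false, false);
    assert_eq!(inline.value(false), 3);
    assert!(!inline.on_heap(false));
}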

#[repr(C)]
pub struct SmallVec<T, const N: usize> {
    len: TaggedLen,
    raw: RawSmallVec<T, N>,
    _marker: PhantomData<T>,
}

unsafe impl<T: Send, const N: usize> Send for SmallVec<T, N> {}
unsafe impl<T: Sync, const N: usize> Sync for SmallVec<T, N> {}

pub struct Drain<'a, T: 'a, const N: usize> {
    tail_start: usize,
    tail_len: usize,
    iter: core::slice::Iter<'a, T>,
    vec: core::ptr::NonNull<SmallVec<T, N>>,
}

impl<'a, T: 'a, const N: usize> Iterator for Drain<'a, T, N> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter
            .next()
            .map(|reference| unsafe { core::ptr::read(reference) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, T: 'a, const N: usize> DoubleEndedIterator for Drain<'a, T, N> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter
            .next_back()
            .map(|reference| unsafe { core::ptr::read(reference) })
    }
}

impl<T, const N: usize> ExactSizeIterator for Drain<'_, T, N> {
    #[inline]
    fn len(&self) -> usize {
        self.iter.len()
    }
}

impl<T, const N: usize> core::iter::FusedIterator for Drain<'_, T, N> {}

impl<'a, T: 'a, const N: usize> Drop for Drain<'a, T, N> {
    fn drop(&mut self) {
        if core::mem::needs_drop::<T>() {
            self.for_each(drop);
        }

        if self.tail_len > 0 {
            unsafe {
                let source_vec = self.vec.as_mut();

                let start = source_vec.len();
                let tail = self.tail_start;
                if tail != start {
                    let ptr = source_vec.as_mut_ptr();
                    let src = ptr.add(tail);
                    let dst = ptr.add(start);
                    copy(src, dst, self.tail_len);
                }
                source_vec.set_len(start + self.tail_len);
            }
        }
    }
}
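
// A minimal usage sketch of `drain`: the drained range is yielded by the
// iterator, and the tail is shifted back down when the `Drain` guard drops.
#[cfg(test)]
#[test]
fn drain_usage_sketch() {
    let mut v: SmallVec<i32, 4> = SmallVec::from_buf([1, 2, 3, 4]);
    let drained: Vec<i32> = v.drain(1..3).collect();
    assert_eq!(drained, [2, 3]);
    assert_eq!(v.as_slice(), &[1, 4]);
}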

#[cfg(feature = "extract_if")]
pub struct ExtractIf<'a, T, const N: usize, F>
where
    F: FnMut(&mut T) -> bool,
{
    vec: &'a mut SmallVec<T, N>,
    idx: usize,
    del: usize,
    old_len: usize,
    pred: F,
}

#[cfg(feature = "extract_if")]
impl<T, const N: usize, F> core::fmt::Debug for ExtractIf<'_, T, N, F>
where
    F: FnMut(&mut T) -> bool,
    T: core::fmt::Debug,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("ExtractIf")
            .field(&self.vec.as_slice())
            .finish()
    }
}

#[cfg(feature = "extract_if")]
impl<T, F, const N: usize> Iterator for ExtractIf<'_, T, N, F>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        unsafe {
            while self.idx < self.old_len {
                let i = self.idx;
                let v = core::slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
                let drained = (self.pred)(&mut v[i]);
                self.idx += 1;
                if drained {
                    self.del += 1;
                    return Some(core::ptr::read(&v[i]));
                } else if self.del > 0 {
                    let del = self.del;
                    let src: *const T = &v[i];
                    let dst: *mut T = &mut v[i - del];
                    core::ptr::copy_nonoverlapping(src, dst, 1);
                }
            }
            None
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.old_len - self.idx))
    }
}

#[cfg(feature = "extract_if")]
impl<T, F, const N: usize> Drop for ExtractIf<'_, T, N, F>
where
    F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        unsafe {
            if self.idx < self.old_len && self.del > 0 {
                let ptr = self.vec.as_mut_ptr();
                let src = ptr.add(self.idx);
                let dst = src.sub(self.del);
                let tail_len = self.old_len - self.idx;
                src.copy_to(dst, tail_len);
            }
            self.vec.set_len(self.old_len - self.del);
        }
    }
}
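
// A minimal usage sketch of `extract_if` (only with the `extract_if` feature):
// elements matching the predicate are moved out, the rest are kept in order.
#[cfg(all(test, feature = "extract_if"))]
#[test]
fn extract_if_usage_sketch() {
    let mut v: SmallVec<i32, 4> = SmallVec::from_buf([1, 2, 3, 4]);
    let even: SmallVec<i32, 4> = v.extract_if(|x| *x % 2 == 0).collect();
    assert_eq!(even.as_slice(), &[2, 4]);
    assert_eq!(v.as_slice(), &[1, 3]);
}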

pub struct IntoIter<T, const N: usize> {
    raw: RawSmallVec<T, N>,
    begin: usize,
    end: TaggedLen,
    _marker: PhantomData<T>,
}

unsafe impl<T, const N: usize> Send for IntoIter<T, N> where T: Send {}
unsafe impl<T, const N: usize> Sync for IntoIter<T, N> where T: Sync {}

impl<T, const N: usize> IntoIter<T, N> {
    #[inline]
    const fn is_zst() -> bool {
        size_of::<T>() == 0
    }

    #[inline]
    const fn as_ptr(&self) -> *const T {
        let on_heap = self.end.on_heap(Self::is_zst());
        if on_heap {
            unsafe { self.raw.as_ptr_heap() }
        } else {
            self.raw.as_ptr_inline()
        }
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T {
        let on_heap = self.end.on_heap(Self::is_zst());
        if on_heap {
            unsafe { self.raw.as_mut_ptr_heap() }
        } else {
            self.raw.as_mut_ptr_inline()
        }
    }

    #[inline]
    pub fn as_slice(&self) -> &[T] {
        unsafe {
            let ptr = self.as_ptr();
            core::slice::from_raw_parts(
                ptr.add(self.begin),
                self.end.value(Self::is_zst()) - self.begin,
            )
        }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe {
            let ptr = self.as_mut_ptr();
            core::slice::from_raw_parts_mut(
                ptr.add(self.begin),
                self.end.value(Self::is_zst()) - self.begin,
            )
        }
    }
}

impl<T, const N: usize> Iterator for IntoIter<T, N> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.begin == self.end.value(Self::is_zst()) {
            None
        } else {
            unsafe {
                let ptr = self.as_mut_ptr();
                let value = ptr.add(self.begin).read();
                self.begin += 1;
                Some(value)
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.end.value(Self::is_zst()) - self.begin;
        (size, Some(size))
    }
}

impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, N> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        let mut end = self.end.value(Self::is_zst());
        if self.begin == end {
            None
        } else {
            unsafe {
                let ptr = self.as_mut_ptr();
                let on_heap = self.end.on_heap(Self::is_zst());
                end -= 1;
                self.end = TaggedLen::new(end, on_heap, Self::is_zst());
                let value = ptr.add(end).read();
                Some(value)
            }
        }
    }
}
impl<T, const N: usize> ExactSizeIterator for IntoIter<T, N> {}
impl<T, const N: usize> core::iter::FusedIterator for IntoIter<T, N> {}
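
// A minimal sketch of the owning iterator: items can be taken from both ends,
// and `as_slice` exposes whatever has not been yielded yet.
#[cfg(test)]
#[test]
fn into_iter_usage_sketch() {
    let v: SmallVec<i32, 3> = SmallVec::from_buf([1, 2, 3]);
    let mut it = v.into_iter();
    assert_eq!(it.next(), Some(1));
    assert_eq!(it.next_back(), Some(3));
    assert_eq!(it.as_slice(), &[2]);
}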

impl<T, const N: usize> SmallVec<T, N> {
    #[inline]
    const fn is_zst() -> bool {
        size_of::<T>() == 0
    }

    #[inline]
    pub const fn new() -> SmallVec<T, N> {
        Self {
            len: TaggedLen::new(0, false, Self::is_zst()),
            raw: RawSmallVec::new(),
            _marker: PhantomData,
        }
    }

    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let mut this = Self::new();
        if capacity > Self::inline_size() {
            this.grow(capacity);
        }
        this
    }

    #[inline]
    pub fn from_vec(vec: Vec<T>) -> Self {
        if vec.capacity() == 0 {
            return Self::new();
        }

        if Self::is_zst() {
            let mut vec = vec;
            let len = vec.len();

            unsafe { vec.set_len(0) };
            Self {
                len: TaggedLen::new(len, false, Self::is_zst()),
                raw: RawSmallVec::new(),
                _marker: PhantomData,
            }
        } else {
            let mut vec = ManuallyDrop::new(vec);
            let len = vec.len();
            let cap = vec.capacity();
            let ptr = unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) };

            Self {
                len: TaggedLen::new(len, true, Self::is_zst()),
                raw: RawSmallVec::new_heap(ptr, cap),
                _marker: PhantomData,
            }
        }
    }

    #[inline]
    pub const fn from_buf(buf: [T; N]) -> Self {
        Self {
            len: TaggedLen::new(N, false, Self::is_zst()),
            raw: RawSmallVec::new_inline(MaybeUninit::new(buf)),
            _marker: PhantomData,
        }
    }

    #[inline]
    pub fn from_buf_and_len(buf: [T; N], len: usize) -> Self {
        assert!(len <= N);
        let mut vec = Self {
            len: TaggedLen::new(len, false, Self::is_zst()),
            raw: RawSmallVec::new_inline(MaybeUninit::new(buf)),
            _marker: PhantomData,
        };
        unsafe {
            let remainder_ptr = vec.raw.as_mut_ptr_inline().add(len);
            let remainder_len = N - len;

            core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(
                remainder_ptr,
                remainder_len,
            ));
        }

        vec
    }

    #[inline]
    pub const unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit<[T; N]>, len: usize) -> Self {
        debug_assert!(len <= N);
        Self {
            len: TaggedLen::new(len, false, Self::is_zst()),
            raw: RawSmallVec::new_inline(buf),
            _marker: PhantomData,
        }
    }

    #[inline]
    unsafe fn set_on_heap(&mut self) {
        self.len = TaggedLen::new(self.len(), true, Self::is_zst());
    }

    #[inline]
    unsafe fn set_inline(&mut self) {
        self.len = TaggedLen::new(self.len(), false, Self::is_zst());
    }

    #[inline]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= self.capacity());
        let on_heap = self.len.on_heap(Self::is_zst());
        self.len = TaggedLen::new(new_len, on_heap, Self::is_zst());
    }

    #[inline]
    pub const fn inline_size() -> usize {
        if Self::is_zst() {
            usize::MAX
        } else {
            N
        }
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.len.value(Self::is_zst())
    }

    #[must_use]
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn capacity(&self) -> usize {
        if self.len.on_heap(Self::is_zst()) {
            unsafe { self.raw.heap.1 }
        } else {
            Self::inline_size()
        }
    }

    #[inline]
    pub const fn spilled(&self) -> bool {
        self.len.on_heap(Self::is_zst())
    }

    #[inline]
    pub fn split_off(&mut self, at: usize) -> Self {
        let len = self.len();
        assert!(at <= len);

        let other_len = len - at;
        let mut other = Self::with_capacity(other_len);

        unsafe {
            self.set_len(at);
            other.set_len(other_len);

            core::ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other_len);
        }
        other
    }

    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, N>
    where
        R: core::ops::RangeBounds<usize>,
    {
        use core::ops::Bound::*;

        let len = self.len();
        let start = match range.start_bound() {
            Included(&n) => n,
            Excluded(&n) => n.checked_add(1).expect("Range start out of bounds"),
            Unbounded => 0,
        };
        let end = match range.end_bound() {
            Included(&n) => n.checked_add(1).expect("Range end out of bounds"),
            Excluded(&n) => n,
            Unbounded => len,
        };

        assert!(start <= end);
        assert!(end <= len);

        unsafe {
            self.set_len(start);

            let range_slice = core::slice::from_raw_parts(self.as_ptr().add(start), end - start);

            Drain {
                tail_start: end,
                tail_len: len - end,
                iter: range_slice.iter(),
                vec: core::ptr::NonNull::new_unchecked(self as *mut _),
            }
        }
    }

    #[cfg(feature = "extract_if")]
    pub fn extract_if<F>(&mut self, filter: F) -> ExtractIf<'_, T, N, F>
    where
        F: FnMut(&mut T) -> bool,
    {
        let old_len = self.len();

        unsafe {
            self.set_len(0);
        }

        ExtractIf {
            vec: self,
            idx: 0,
            del: 0,
            old_len,
            pred: filter,
        }
    }

    #[inline]
    pub fn push(&mut self, value: T) {
        let len = self.len();
        if len == self.capacity() {
            self.reserve(1);
        }
        let ptr = unsafe { self.as_mut_ptr().add(len) };
        unsafe { ptr.write(value) };
        unsafe { self.set_len(len + 1) }
    }

    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.is_empty() {
            None
        } else {
            let len = self.len() - 1;
            unsafe { self.set_len(len) };
            let value = unsafe { self.as_mut_ptr().add(len).read() };
            Some(value)
        }
    }

    #[inline]
    pub fn append<const M: usize>(&mut self, other: &mut SmallVec<T, M>) {
        let len = self.len();
        let other_len = other.len();
        let total_len = len + other_len;
        if total_len > self.capacity() {
            self.reserve(other_len);
        }

        let ptr = unsafe { self.as_mut_ptr().add(len) };
        unsafe { other.set_len(0) }
        unsafe { copy_nonoverlapping(other.as_ptr(), ptr, other_len) };
        unsafe { self.set_len(total_len) }
    }

    #[inline]
    pub fn grow(&mut self, new_capacity: usize) {
        infallible(self.try_grow(new_capacity));
    }

    #[cold]
    pub fn try_grow(&mut self, new_capacity: usize) -> Result<(), CollectionAllocErr> {
        if Self::is_zst() {
            return Ok(());
        }

        let len = self.len();
        assert!(new_capacity >= len);

        if new_capacity > Self::inline_size() {
            let result = unsafe { self.raw.try_grow_raw(self.len, new_capacity) };

            if result.is_ok() {
                unsafe { self.set_on_heap() };
            }
            result
        } else {
            if self.spilled() {
                unsafe {
                    let (ptr, old_cap) = self.raw.heap;
                    copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len);
                    drop(DropDealloc {
                        ptr: ptr.cast(),
                        size_bytes: old_cap * size_of::<T>(),
                        align: align_of::<T>(),
                    });
                    self.set_inline();
                }
            }
            Ok(())
        }
    }

    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        if additional > self.capacity() - self.len() {
            let new_capacity = infallible(
                self.len()
                    .checked_add(additional)
                    .and_then(usize::checked_next_power_of_two)
                    .ok_or(CollectionAllocErr::CapacityOverflow),
            );
            self.grow(new_capacity);
        }
    }

    #[inline]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        if additional > self.capacity() - self.len() {
            let new_capacity = self
                .len()
                .checked_add(additional)
                .and_then(usize::checked_next_power_of_two)
                .ok_or(CollectionAllocErr::CapacityOverflow)?;
            self.try_grow(new_capacity)
        } else {
            Ok(())
        }
    }

    #[inline]
    pub fn reserve_exact(&mut self, additional: usize) {
        if additional > self.capacity() - self.len() {
            let new_capacity = infallible(
                self.len()
                    .checked_add(additional)
                    .ok_or(CollectionAllocErr::CapacityOverflow),
            );
            self.grow(new_capacity);
        }
    }

    #[inline]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        if additional > self.capacity() - self.len() {
            let new_capacity = self
                .len()
                .checked_add(additional)
                .ok_or(CollectionAllocErr::CapacityOverflow)?;
            self.try_grow(new_capacity)
        } else {
            Ok(())
        }
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        if !self.spilled() {
            return;
        }
        let len = self.len();
        if len <= Self::inline_size() {
            unsafe {
                let (ptr, capacity) = self.raw.heap;
                self.raw = RawSmallVec::new_inline(MaybeUninit::uninit());
                copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len);
                self.set_inline();
                alloc::alloc::dealloc(
                    ptr.cast().as_ptr(),
                    Layout::from_size_align_unchecked(capacity * size_of::<T>(), align_of::<T>()),
                );
            }
        } else if len < self.capacity() {
            unsafe { infallible(self.raw.try_grow_raw(self.len, len)) };
        }
    }

    #[inline]
    pub fn truncate(&mut self, len: usize) {
        let old_len = self.len();
        if len < old_len {
            unsafe {
                self.set_len(len);
                core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(
                    self.as_mut_ptr().add(len),
                    old_len - len,
                ))
            }
        }
    }

    #[inline]
    pub fn as_slice(&self) -> &[T] {
        let len = self.len();
        let ptr = self.as_ptr();
        unsafe { core::slice::from_raw_parts(ptr, len) }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = self.len();
        let ptr = self.as_mut_ptr();
        unsafe { core::slice::from_raw_parts_mut(ptr, len) }
    }

    #[inline]
    pub fn swap_remove(&mut self, index: usize) -> T {
        let len = self.len();
        assert!(index < len);
        let new_len = len - 1;
        unsafe {
            self.set_len(new_len);
            let ptr = self.as_mut_ptr();
            let last = ptr.add(new_len);
            let ith = ptr.add(index);
            let last_item = last.read();
            let ith_item = ith.read();

            ith.write(last_item);
            ith_item
        }
    }

    #[inline]
    pub fn clear(&mut self) {
        self.truncate(0);
    }

    #[inline]
    pub fn remove(&mut self, index: usize) -> T {
        let len = self.len();
        assert!(index < len);
        let new_len = len - 1;
        unsafe {
            self.set_len(new_len);
            let ptr = self.as_mut_ptr();
            let ith = ptr.add(index);
            let ith_item = ith.read();
            copy(ith.add(1), ith, new_len - index);
            ith_item
        }
    }

    #[inline]
    pub fn insert(&mut self, index: usize, value: T) {
        let len = self.len();
        assert!(index <= len);
        self.reserve(1);
        let ptr = self.as_mut_ptr();
        unsafe {
            if index < len {
                copy(ptr.add(index), ptr.add(index + 1), len - index);
            }
            ptr.add(index).write(value);

            self.set_len(len + 1);
        }
    }

    fn insert_many_impl<I: Iterator<Item = T>>(&mut self, mut index: usize, iter: I) {
        let len = self.len();
        if index == len {
            return self.extend(iter);
        }

        let mut iter = iter.fuse();
        let (lower_bound, _) = iter.size_hint();
        self.reserve(lower_bound);

        let count = unsafe {
            let ptr = self.as_mut_ptr();
            let count = insert_many_batch(ptr, index, lower_bound, len, &mut iter);
            self.set_len(len + count);
            count
        };

        index += count;
        iter.enumerate()
            .for_each(|(i, item)| self.insert(index + i, item));
    }

    #[inline]
    pub fn insert_many<I: IntoIterator<Item = T>>(&mut self, index: usize, iterable: I) {
        self.insert_many_impl(index, iterable.into_iter());
    }

    #[inline]
    pub const fn as_ptr(&self) -> *const T {
        if self.len.on_heap(Self::is_zst()) {
            unsafe { self.raw.as_ptr_heap() }
        } else {
            self.raw.as_ptr_inline()
        }
    }

    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        if self.len.on_heap(Self::is_zst()) {
            unsafe { self.raw.as_mut_ptr_heap() }
        } else {
            self.raw.as_mut_ptr_inline()
        }
    }

    #[inline]
    pub fn into_vec(self) -> Vec<T> {
        let len = self.len();
        if !self.spilled() {
            let mut vec = Vec::with_capacity(len);
            let this = ManuallyDrop::new(self);
            unsafe {
                copy_nonoverlapping(this.raw.as_ptr_inline(), vec.as_mut_ptr(), len);
                vec.set_len(len);
            }
            vec
        } else {
            let this = ManuallyDrop::new(self);
            unsafe {
                let (ptr, cap) = this.raw.heap;
                Vec::from_raw_parts(ptr.as_ptr(), len, cap)
            }
        }
    }

    #[inline]
    pub fn into_boxed_slice(self) -> Box<[T]> {
        self.into_vec().into_boxed_slice()
    }

    #[inline]
    pub fn into_inner(self) -> Result<[T; N], Self> {
        if self.len() != N {
            Err(self)
        } else {
            let mut this = self;
            unsafe {
                this.set_len(0);
            }
            let ptr = this.as_ptr() as *const [T; N];
            unsafe { Ok(ptr.read()) }
        }
    }

    pub fn retain<F: FnMut(&mut T) -> bool>(&mut self, mut f: F) {
        let mut del = 0;
        let len = self.len();
        let ptr = self.as_mut_ptr();
        for i in 0..len {
            unsafe {
                if !f(&mut *ptr.add(i)) {
                    del += 1;
                } else if del > 0 {
                    core::ptr::swap(ptr.add(i), ptr.add(i - del));
                }
            }
        }
        self.truncate(len - del);
    }

    #[inline]
    pub fn retain_mut<F: FnMut(&mut T) -> bool>(&mut self, f: F) {
        self.retain(f)
    }

    #[inline]
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }

    #[inline]
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq<K>,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }

    #[inline]
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        let len = self.len();
        if len <= 1 {
            return;
        }

        let ptr = self.as_mut_ptr();
        let mut w: usize = 1;

        unsafe {
            for r in 1..len {
                let p_r = ptr.add(r);
                let p_wm1 = ptr.add(w - 1);
                if !same_bucket(&mut *p_r, &mut *p_wm1) {
                    if r != w {
                        let p_w = p_wm1.add(1);
                        core::ptr::swap(p_r, p_w);
                    }
                    w += 1;
                }
            }
        }

        self.truncate(w);
    }

    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
    where
        F: FnMut() -> T,
    {
        let old_len = self.len();
        if old_len < new_len {
            let mut f = f;
            let additional = new_len - old_len;
            self.reserve(additional);
            for _ in 0..additional {
                self.push(f());
            }
        } else if old_len > new_len {
            self.truncate(new_len);
        }
    }

    #[inline]
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        unsafe {
            core::slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len()) as *mut MaybeUninit<T>,
                self.capacity() - self.len(),
            )
        }
    }

    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> SmallVec<T, N> {
        assert!(!Self::is_zst());

        let ptr = unsafe {
            debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer.");
            NonNull::new_unchecked(ptr)
        };

        SmallVec {
            len: TaggedLen::new(length, true, Self::is_zst()),
            raw: RawSmallVec::new_heap(ptr, capacity),
            _marker: PhantomData,
        }
    }

    fn extend_impl<I: Iterator<Item = T>>(&mut self, iter: I) {
        let mut iter = iter.fuse();
        let (lower_bound, _) = iter.size_hint();
        self.reserve(lower_bound);
        let mut capacity = self.capacity();
        let mut ptr = self.as_mut_ptr();
        unsafe {
            loop {
                let mut len = self.len();
                ptr = ptr.add(len);
                let mut guard = DropGuard { ptr, len: 0 };
                iter.by_ref().take(capacity - len).for_each(|item| {
                    ptr.add(guard.len).write(item);
                    guard.len += 1;
                });
                len += guard.len;
                core::mem::forget(guard);
                self.set_len(len);
                if let Some(item) = iter.next() {
                    self.push(item);
                } else {
                    return;
                }
                let (heap_ptr, heap_capacity) = self.raw.heap;
                ptr = heap_ptr.as_ptr();
                capacity = heap_capacity;
            }
        }
    }
}
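
// A minimal sketch of the inline-to-heap transition: with `N = 2`, the third
// push must spill the elements onto the heap.
#[cfg(test)]
#[test]
fn inline_spill_sketch() {
    let mut v: SmallVec<u8, 2> = SmallVec::new();
    v.push(1);
    v.push(2);
    assert!(!v.spilled());
    v.push(3);
    assert!(v.spilled());
    assert_eq!(v.as_slice(), &[1, 2, 3]);
    assert_eq!(v.pop(), Some(3));
}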

impl<T, const N: usize> Default for SmallVec<T, N> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<T: Copy, const N: usize> SmallVec<T, N> {
    #[inline]
    pub fn from_slice(slice: &[T]) -> Self {
        let len = slice.len();
        if len <= Self::inline_size() {
            let mut this = Self::new();
            unsafe {
                let ptr = this.raw.as_mut_ptr_inline();
                copy_nonoverlapping(slice.as_ptr(), ptr, len);
                this.set_len(len);
            }
            this
        } else {
            let mut this = Vec::with_capacity(len);
            unsafe {
                let ptr = this.as_mut_ptr();
                copy_nonoverlapping(slice.as_ptr(), ptr, len);
                this.set_len(len);
            }
            Self::from_vec(this)
        }
    }

    #[inline]
    pub fn insert_from_slice(&mut self, index: usize, slice: &[T]) {
        let len = self.len();
        let other_len = slice.len();
        assert!(index <= len);
        self.reserve(other_len);
        unsafe {
            let base_ptr = self.as_mut_ptr();
            let ith_ptr = base_ptr.add(index);
            let shifted_ptr = base_ptr.add(index + other_len);
            copy(ith_ptr, shifted_ptr, len - index);
            copy_nonoverlapping(slice.as_ptr(), ith_ptr, other_len);

            self.set_len(len + other_len);
        }
    }

    #[inline]
    pub fn extend_from_slice(&mut self, slice: &[T]) {
        let len = self.len();
        let other_len = slice.len();
        self.reserve(other_len);
        unsafe {
            let base_ptr = self.as_mut_ptr();
            let end_ptr = base_ptr.add(len);
            copy_nonoverlapping(slice.as_ptr(), end_ptr, other_len);
            self.set_len(len + other_len);
        }
    }
}

impl<T: Clone, const N: usize> SmallVec<T, N> {
    #[inline]
    pub fn resize(&mut self, len: usize, value: T) {
        let old_len = self.len();
        if len > old_len {
            self.extend(core::iter::repeat(value).take(len - old_len));
        } else {
            self.truncate(len);
        }
    }

    #[inline]
    pub fn from_elem(elem: T, n: usize) -> Self {
        if n > Self::inline_size() {
            Self::from_vec(vec![elem; n])
        } else {
            let mut v = Self::new();

            unsafe {
                let ptr = v.raw.as_mut_ptr_inline();
                let mut guard = DropGuard { ptr, len: 0 };

                for i in 0..n {
                    guard.len = i;
                    ptr.add(i).write(elem.clone());
                }
                core::mem::forget(guard);
                v.set_len(n);
            }
            v
        }
    }
}
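
// A minimal sketch of `from_elem` and `resize`: growing clones the fill value,
// shrinking truncates.
#[cfg(test)]
#[test]
fn from_elem_resize_sketch() {
    let mut v: SmallVec<u8, 4> = SmallVec::from_elem(7, 3);
    assert_eq!(v.as_slice(), &[7, 7, 7]);
    v.resize(5, 0);
    assert_eq!(v.as_slice(), &[7, 7, 7, 0, 0]);
    v.resize(2, 0);
    assert_eq!(v.as_slice(), &[7, 7]);
}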

struct DropShiftGuard<T> {
    ptr: *mut T,
    len: usize,
    shifted_ptr: *const T,
    shifted_len: usize,
}
impl<T> Drop for DropShiftGuard<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            core::ptr::slice_from_raw_parts_mut(self.ptr, self.len).drop_in_place();
            copy(self.shifted_ptr, self.ptr, self.shifted_len);
        }
    }
}

struct DropGuard<T> {
    ptr: *mut T,
    len: usize,
}
impl<T> Drop for DropGuard<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            core::ptr::slice_from_raw_parts_mut(self.ptr, self.len).drop_in_place();
        }
    }
}

#[inline]
unsafe fn insert_many_batch<T, I: Iterator<Item = T>>(
    ptr: *mut T,
    index: usize,
    lower_bound: usize,
    len: usize,
    iter: &mut I,
) -> usize {
    copy(ptr.add(index), ptr.add(index + lower_bound), len - index);
    let ptr_ith = ptr.add(index);
    let mut guard = DropShiftGuard {
        ptr: ptr_ith,
        len: 0,
        shifted_ptr: ptr_ith.add(lower_bound),
        shifted_len: len - index,
    };
    iter.take(lower_bound).enumerate().for_each(|(i, item)| {
        ptr_ith.add(i).write(item);
        guard.len = i + 1;
    });
    let count = guard.len;
    core::mem::forget(guard);

    if count < lower_bound {
        copy(ptr_ith.add(lower_bound), ptr_ith.add(count), len - index);
    }
    count
}

impl<T, const N: usize> Extend<T> for SmallVec<T, N> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iterable: I) {
        self.extend_impl(iterable.into_iter());
    }
}

struct DropDealloc {
    ptr: NonNull<u8>,
    size_bytes: usize,
    align: usize,
}

impl Drop for DropDealloc {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            if self.size_bytes > 0 {
                alloc::alloc::dealloc(
                    self.ptr.as_ptr(),
                    Layout::from_size_align_unchecked(self.size_bytes, self.align),
                );
            }
        }
    }
}

#[cfg(feature = "may_dangle")]
unsafe impl<#[may_dangle] T, const N: usize> Drop for SmallVec<T, N> {
    fn drop(&mut self) {
        let on_heap = self.spilled();
        let len = self.len();
        let ptr = self.as_mut_ptr();
        unsafe {
            let _drop_dealloc = if on_heap {
                let capacity = self.capacity();
                Some(DropDealloc {
                    ptr: NonNull::new_unchecked(ptr as *mut u8),
                    size_bytes: capacity * size_of::<T>(),
                    align: align_of::<T>(),
                })
            } else {
                None
            };
            core::ptr::slice_from_raw_parts_mut(ptr, len).drop_in_place();
        }
    }
}

#[cfg(not(feature = "may_dangle"))]
impl<T, const N: usize> Drop for SmallVec<T, N> {
    fn drop(&mut self) {
        let on_heap = self.spilled();
        let len = self.len();
        let ptr = self.as_mut_ptr();
        unsafe {
            let _drop_dealloc = if on_heap {
                let capacity = self.capacity();
                Some(DropDealloc {
                    ptr: NonNull::new_unchecked(ptr as *mut u8),
                    size_bytes: capacity * size_of::<T>(),
                    align: align_of::<T>(),
                })
            } else {
                None
            };
            core::ptr::slice_from_raw_parts_mut(ptr, len).drop_in_place();
        }
    }
}

impl<T, const N: usize> Drop for IntoIter<T, N> {
    fn drop(&mut self) {
        unsafe {
            let is_zst = size_of::<T>() == 0;
            let on_heap = self.end.on_heap(is_zst);
            let begin = self.begin;
            let end = self.end.value(is_zst);
            let ptr = self.as_mut_ptr();
            let _drop_dealloc = if on_heap {
                let capacity = self.raw.heap.1;
                Some(DropDealloc {
                    ptr: NonNull::new_unchecked(ptr as *mut u8),
                    size_bytes: capacity * size_of::<T>(),
                    align: align_of::<T>(),
                })
            } else {
                None
            };
            core::ptr::slice_from_raw_parts_mut(ptr.add(begin), end - begin).drop_in_place();
        }
    }
}

impl<T, const N: usize> core::ops::Deref for SmallVec<T, N> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T, const N: usize> core::ops::DerefMut for SmallVec<T, N> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T, const N: usize> core::iter::FromIterator<T> for SmallVec<T, N> {
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iterable: I) -> Self {
        let mut vec = Self::new();
        vec.extend_impl(iterable.into_iter());
        vec
    }
}

#[cfg(feature = "specialization")]
trait SpecFrom {
    type Element;
    fn spec_from(slice: &[Self::Element]) -> Self;
}

#[cfg(feature = "specialization")]
impl<T: Clone, const N: usize> SpecFrom for SmallVec<T, N> {
    type Element = T;

    default fn spec_from(slice: &[Self::Element]) -> Self {
        slice.iter().cloned().collect()
    }
}

#[cfg(feature = "specialization")]
impl<T: Copy, const N: usize> SpecFrom for SmallVec<T, N> {
    fn spec_from(slice: &[Self::Element]) -> Self {
        Self::from_slice(slice)
    }
}

#[cfg(feature = "specialization")]
impl<'a, T: Clone, const N: usize> From<&'a [T]> for SmallVec<T, N> {
    fn from(slice: &'a [T]) -> Self {
        <Self as SpecFrom>::spec_from(slice)
    }
}

#[cfg(not(feature = "specialization"))]
impl<'a, T: Clone, const N: usize> From<&'a [T]> for SmallVec<T, N> {
    fn from(slice: &'a [T]) -> Self {
        slice.iter().cloned().collect()
    }
}

impl<T, const N: usize, const M: usize> From<[T; M]> for SmallVec<T, N> {
    fn from(array: [T; M]) -> Self {
        if M > N {
            Self::from(Vec::from(array))
        } else {
            let mut this = Self::new();
            debug_assert!(M <= this.capacity());
            let array = ManuallyDrop::new(array);
            unsafe {
                copy_nonoverlapping(array.as_ptr(), this.as_mut_ptr(), M);
                this.set_len(M);
            }
            this
        }
    }
}
impl<T, const N: usize> From<Vec<T>> for SmallVec<T, N> {
    fn from(array: Vec<T>) -> Self {
        Self::from_vec(array)
    }
}

impl<T: Clone, const N: usize> Clone for SmallVec<T, N> {
    #[inline]
    fn clone(&self) -> SmallVec<T, N> {
        SmallVec::from(self.as_slice())
    }

    fn clone_from(&mut self, source: &Self) {
        self.truncate(source.len());

        let init = unsafe { source.get_unchecked(..self.len()) };
        let tail = unsafe { source.get_unchecked(self.len()..) };

        self.clone_from_slice(init);
        self.extend(tail.iter().cloned());
    }
}

impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
    #[inline]
    fn clone(&self) -> IntoIter<T, N> {
        SmallVec::from(self.as_slice()).into_iter()
    }
}

#[macro_export]
macro_rules! smallvec {
    (@one $x:expr) => (1usize);
    ($elem:expr; $n:expr) => ({
        $crate::SmallVec::from_elem($elem, $n)
    });
    ($($x:expr),*$(,)?) => ({
        let count = 0usize $(+ $crate::smallvec!(@one $x))*;
        #[allow(unused_mut)]
        let mut vec = $crate::SmallVec::new();
        if count <= vec.capacity() {
            $(vec.push($x);)*
            vec
        } else {
            $crate::SmallVec::from_vec($crate::alloc::vec![$($x,)*])
        }
    });
}

#[macro_export]
macro_rules! smallvec_inline {
    (@one $x:expr) => (1usize);
    ($elem:expr; $n:expr) => ({
        $crate::SmallVec::<_, $n>::from_buf([$elem; $n])
    });
    ($($x:expr),+ $(,)?) => ({
        const N: usize = 0usize $(+ $crate::smallvec_inline!(@one $x))*;
        $crate::SmallVec::<_, N>::from_buf([$($x,)*])
    });
}
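
// A minimal sketch of the two constructor macros: `smallvec!` infers the
// inline capacity from the binding's type, while `smallvec_inline!` derives it
// from the number of elements and never allocates.
#[cfg(test)]
#[test]
fn macro_usage_sketch() {
    let a: SmallVec<i32, 3> = smallvec![1, 2, 3];
    assert_eq!(a.as_slice(), &[1, 2, 3]);
    assert!(!a.spilled());

    let b = smallvec_inline![0u8; 4];
    assert_eq!(b.len(), 4);
    assert!(!b.spilled());
}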

impl<T, const N: usize> IntoIterator for SmallVec<T, N> {
    type IntoIter = IntoIter<T, N>;
    type Item = T;
    fn into_iter(self) -> Self::IntoIter {
        unsafe {
            let this = ManuallyDrop::new(self);
            IntoIter {
                raw: (&this.raw as *const RawSmallVec<T, N>).read(),
                begin: 0,
                end: this.len,
                _marker: PhantomData,
            }
        }
    }
}

impl<'a, T, const N: usize> IntoIterator for &'a SmallVec<T, N> {
    type IntoIter = core::slice::Iter<'a, T>;
    type Item = &'a T;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T, const N: usize> IntoIterator for &'a mut SmallVec<T, N> {
    type IntoIter = core::slice::IterMut<'a, T>;
    type Item = &'a mut T;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T, U, const N: usize, const M: usize> PartialEq<SmallVec<U, M>> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &SmallVec<U, M>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}
impl<T, const N: usize> Eq for SmallVec<T, N> where T: Eq {}

impl<T, U, const N: usize, const M: usize> PartialEq<[U; M]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &[U; M]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize, const M: usize> PartialEq<&[U; M]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&[U; M]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize> PartialEq<[U]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize> PartialEq<&[U]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        self[..] == other[..]
    }
}

impl<T, U, const N: usize> PartialEq<&mut [U]> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        self[..] == other[..]
    }
}

impl<T, const N: usize> PartialOrd for SmallVec<T, N>
where
    T: PartialOrd,
{
    #[inline]
    fn partial_cmp(&self, other: &SmallVec<T, N>) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<T, const N: usize> Ord for SmallVec<T, N>
where
    T: Ord,
{
    #[inline]
    fn cmp(&self, other: &SmallVec<T, N>) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T: Hash, const N: usize> Hash for SmallVec<T, N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state)
    }
}

impl<T, const N: usize> Borrow<[T]> for SmallVec<T, N> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, const N: usize> BorrowMut<[T]> for SmallVec<T, N> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T, const N: usize> AsRef<[T]> for SmallVec<T, N> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, const N: usize> AsMut<[T]> for SmallVec<T, N> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T: Debug, const N: usize> Debug for SmallVec<T, N> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<T: Debug, const N: usize> Debug for IntoIter<T, N> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T: Debug, const N: usize> Debug for Drain<'_, T, N> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<T, const N: usize> Serialize for SmallVec<T, N>
where
    T: Serialize,
{
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let mut state = serializer.serialize_seq(Some(self.len()))?;
        for item in self {
            state.serialize_element(item)?;
        }
        state.end()
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de, T, const N: usize> Deserialize<'de> for SmallVec<T, N>
where
    T: Deserialize<'de>,
{
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        deserializer.deserialize_seq(SmallVecVisitor {
            phantom: PhantomData,
        })
    }
}

#[cfg(feature = "serde")]
struct SmallVecVisitor<T, const N: usize> {
    phantom: PhantomData<T>,
}

#[cfg(feature = "serde")]
impl<'de, T, const N: usize> Visitor<'de> for SmallVecVisitor<T, N>
where
    T: Deserialize<'de>,
{
    type Value = SmallVec<T, N>;

    fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        formatter.write_str("a sequence")
    }

    fn visit_seq<B>(self, mut seq: B) -> Result<Self::Value, B::Error>
    where
        B: SeqAccess<'de>,
    {
        use serde::de::Error;
        let len = seq.size_hint().unwrap_or(0);
        let mut values = SmallVec::new();
        values.try_reserve(len).map_err(B::Error::custom)?;

        while let Some(value) = seq.next_element()? {
            values.push(value);
        }

        Ok(values)
    }
}

#[cfg(feature = "malloc_size_of")]
impl<T, const N: usize> MallocShallowSizeOf for SmallVec<T, N> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.spilled() {
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        } else {
            0
        }
    }
}

#[cfg(feature = "malloc_size_of")]
impl<T: MallocSizeOf, const N: usize> MallocSizeOf for SmallVec<T, N> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        for elem in self.iter() {
            n += elem.size_of(ops);
        }
        n
    }
}

#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl<const N: usize> io::Write for SmallVec<u8, N> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
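
// A minimal sketch of the `std::io::Write` impl (requires the `std` feature):
// writes append to the vector and never fail.
#[cfg(all(test, feature = "std"))]
#[test]
fn io_write_usage_sketch() {
    use std::io::Write;

    let mut v: SmallVec<u8, 8> = SmallVec::new();
    v.write_all(b"hello").unwrap();
    assert_eq!(v.as_slice(), b"hello");
}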

#[cfg(feature = "bytes")]
unsafe impl<const N: usize> BufMut for SmallVec<u8, N> {
    #[inline]
    fn remaining_mut(&self) -> usize {
        isize::MAX as usize - self.len()
    }

    #[inline]
    unsafe fn advance_mut(&mut self, cnt: usize) {
        let len = self.len();
        let remaining = self.capacity() - len;

        if remaining < cnt {
            panic!("advance out of bounds: the len is {remaining} but advancing by {cnt}");
        }

        self.set_len(len + cnt);
    }

    #[inline]
    fn chunk_mut(&mut self) -> &mut UninitSlice {
        if self.capacity() == self.len() {
            self.reserve(64);
        }

        let cap = self.capacity();
        let len = self.len();

        let ptr = self.as_mut_ptr();
        unsafe { UninitSlice::from_raw_parts_mut(ptr.add(len), cap - len) }
    }

    #[inline]
    fn put<T: bytes::Buf>(&mut self, mut src: T)
    where
        Self: Sized,
    {
        self.reserve(src.remaining());

        while src.has_remaining() {
            let s = src.chunk();
            let l = s.len();
            self.extend_from_slice(s);
            src.advance(l);
        }
    }

    #[inline]
    fn put_slice(&mut self, src: &[u8]) {
        self.extend_from_slice(src);
    }

    #[inline]
    fn put_bytes(&mut self, val: u8, cnt: usize) {
        let new_len = self.len().saturating_add(cnt);
        self.resize(new_len, val);
    }
}