1use core::cmp::Ordering;
11use core::fmt;
12use core::hash::{Hash, Hasher};
13use core::mem::{ManuallyDrop, MaybeUninit};
14use core::ops::{Deref, DerefMut};
15use core::ptr;
16use core::slice;
17
/// Read-only abstraction over any contiguous, vector-like container.
///
/// Implementors only supply [`AnyVec::as_slice`]; every other method is a
/// default derived from it.
pub trait AnyVec<T> {
    /// Borrows the contents as a slice.
    fn as_slice(&self) -> &[T];

    /// Number of elements.
    fn len(&self) -> usize {
        self.as_slice().len()
    }

    /// `true` when there are no elements.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Checked element access by index.
    fn get(&self, index: usize) -> Option<&T> {
        self.as_slice().get(index)
    }

    /// Linear search for `x`.
    fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().iter().any(|item| item == x)
    }

    /// Iterator over borrowed elements.
    fn iter(&self) -> slice::Iter<'_, T> {
        self.as_slice().iter()
    }
}
51
52impl<T> AnyVec<T> for std::vec::Vec<T> {
53 fn as_slice(&self) -> &[T] {
54 self.as_slice()
55 }
56}
57
58impl<T> AnyVec<T> for [T] {
59 fn as_slice(&self) -> &[T] {
60 self
61 }
62}
63
64impl<T, const N: usize> AnyVec<T> for [T; N] {
65 fn as_slice(&self) -> &[T] {
66 self.as_slice()
67 }
68}
69
70impl<T, const N: usize> AnyVec<T> for SmallVec<T, N> {
71 fn as_slice(&self) -> &[T] {
72 self.as_slice()
73 }
74}
75
/// Raw backing storage for [`SmallVec`]: either an inline array or a heap `Vec`.
///
/// Which field is active is tracked externally by `SmallVec::on_stack`; both
/// fields are wrapped in `ManuallyDrop` so the union itself never runs a
/// destructor — cleanup is the owner's responsibility.
pub union VecData<T, const N: usize> {
    /// Inline buffer of `N` possibly-uninitialized slots (active while on the stack).
    pub stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    /// Heap-allocated storage (active after a spill).
    pub heap: ManuallyDrop<std::vec::Vec<T>>,
}
83
/// A vector that stores up to `N` elements inline (on the stack) and
/// transparently spills to a heap `Vec` when it grows beyond that.
pub struct SmallVec<T, const N: usize> {
    // Number of initialized elements; mirrors the inner Vec's len once spilled.
    len: usize,
    // Usable slots before the next growth: N while inline, Vec::capacity() after.
    capacity: usize,
    // Discriminant for `data`: true => `data.stack` is the active union field.
    on_stack: bool,
    // Whichever backing store is currently active (see `on_stack`).
    data: VecData<T, N>,
}
95
96impl<T, const N: usize> SmallVec<T, N> {
97 pub const MAX_STACK_SIZE: usize = 16 * 1024;
99
100 pub fn new() -> Self {
102 const {
103 assert!(
104 std::mem::size_of::<Self>() <= SmallVec::<T, N>::MAX_STACK_SIZE,
105 "SmallVec is too large! Reduce N."
106 );
107 }
108 Self {
109 len: 0,
110 capacity: N,
111 on_stack: true,
112 data: VecData {
113 stack: ManuallyDrop::new(unsafe { MaybeUninit::uninit().assume_init() }),
114 },
115 }
116 }
117
118 pub fn with_capacity(capacity: usize) -> Self {
120 if capacity <= N {
121 Self::new()
122 } else {
123 let heap_vec = std::vec::Vec::with_capacity(capacity);
124 Self {
125 len: 0,
126 capacity: heap_vec.capacity(),
127 on_stack: false,
128 data: VecData {
129 heap: ManuallyDrop::new(heap_vec),
130 },
131 }
132 }
133 }
134
135 #[inline(always)]
137 pub fn is_on_stack(&self) -> bool {
138 self.on_stack
139 }
140
141 #[inline(always)]
143 pub fn len(&self) -> usize {
144 self.len
145 }
146
147 #[inline(always)]
149 pub fn is_empty(&self) -> bool {
150 self.len == 0
151 }
152
153 #[inline(always)]
156 pub fn capacity(&self) -> usize {
157 self.capacity
158 }
159
160 #[inline(always)]
162 pub fn get(&self, index: usize) -> Option<&T> {
163 if index < self.len {
164 unsafe {
165 let ptr = if self.on_stack {
166 (*self.data.stack).as_ptr() as *const T
167 } else {
168 (*self.data.heap).as_ptr()
169 };
170 Some(&*ptr.add(index))
171 }
172 } else {
173 None
174 }
175 }
176
177 #[inline(always)]
179 pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
180 if index < self.len {
181 unsafe {
182 let ptr = if self.on_stack {
183 (*self.data.stack).as_mut_ptr() as *mut T
184 } else {
185 (*self.data.heap).as_mut_ptr()
186 };
187 Some(&mut *ptr.add(index))
188 }
189 } else {
190 None
191 }
192 }
193
194 pub fn reserve(&mut self, additional: usize) {
198 if self.len + additional > self.capacity {
199 unsafe {
200 if self.on_stack {
201 self.spill_to_heap();
202 }
203 (*self.data.heap).reserve(additional);
204 self.capacity = (*self.data.heap).capacity();
205 }
206 }
207 }
208
    /// Appends `item`, spilling to the heap when the current capacity is full.
    ///
    /// The in-capacity fast path is inlined; growth is delegated to the cold,
    /// non-inlined `grow_and_push`.
    #[inline(always)]
    pub fn push(&mut self, item: T) {
        if self.len < self.capacity {
            unsafe {
                // Select the active storage's base pointer (union access).
                let ptr = if self.on_stack {
                    (*self.data.stack).as_mut_ptr() as *mut T
                } else {
                    (*self.data.heap).as_mut_ptr()
                };
                // Write into the first uninitialized slot, then publish it.
                ptr::write(ptr.add(self.len), item);
                self.len += 1;
                if !self.on_stack {
                    // Keep the inner Vec's len in sync so its Drop stays correct.
                    (*self.data.heap).set_len(self.len);
                }
            }
        } else {
            self.grow_and_push(item);
        }
    }
231
    /// Cold path for `push`: moves inline elements to the heap if needed,
    /// then lets `Vec::push` handle (re)allocation.
    #[inline(never)]
    fn grow_and_push(&mut self, item: T) {
        unsafe {
            if self.on_stack {
                self.spill_to_heap();
            }
            (*self.data.heap).push(item);
            // Mirror the Vec's bookkeeping into our own fields.
            self.len = (*self.data.heap).len();
            self.capacity = (*self.data.heap).capacity();
        }
    }
244
    /// Removes and returns the last element, or `None` if the vector is empty.
    #[inline(always)]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            self.len -= 1;
            unsafe {
                let ptr = if self.on_stack {
                    (*self.data.stack).as_ptr() as *mut T
                } else {
                    (*self.data.heap).as_mut_ptr()
                };
                // Move the value out; its slot is now past `len` and therefore
                // treated as uninitialized from here on.
                let val = ptr::read(ptr.add(self.len));
                if !self.on_stack {
                    // Sync the Vec's len so it won't re-drop the moved value.
                    (*self.data.heap).set_len(self.len);
                }
                Some(val)
            }
        }
    }
266
    /// Inserts `element` at `index`, shifting everything after it one slot
    /// to the right.
    ///
    /// # Panics
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len);
        if self.len == self.capacity {
            // Full: grow (and possibly spill) on the cold path.
            self.grow_for_insert(index, element);
        } else {
            unsafe {
                let ptr = if self.on_stack {
                    (*self.data.stack).as_mut_ptr() as *mut T
                } else {
                    (*self.data.heap).as_mut_ptr()
                };
                // Shift [index, len) right by one (ptr::copy allows overlap),
                // then write the new element into the gap.
                ptr::copy(ptr.add(index), ptr.add(index + 1), self.len - index);
                ptr::write(ptr.add(index), element);
                self.len += 1;
                if !self.on_stack {
                    (*self.data.heap).set_len(self.len);
                }
            }
        }
    }
291
    /// Cold path for `insert` when storage is full: spill to the heap if
    /// needed, then let `Vec::insert` grow and shift in one step.
    #[inline(never)]
    fn grow_for_insert(&mut self, index: usize, element: T) {
        unsafe {
            if self.on_stack {
                self.spill_to_heap();
            }
            (*self.data.heap).insert(index, element);
            // Mirror the Vec's bookkeeping into our own fields.
            self.len = (*self.data.heap).len();
            self.capacity = (*self.data.heap).capacity();
        }
    }
304
    /// Removes and returns the element at `index`, shifting the tail one slot
    /// to the left (order-preserving, O(n)).
    ///
    /// # Panics
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        unsafe {
            let ptr = if self.on_stack {
                (*self.data.stack).as_mut_ptr() as *mut T
            } else {
                (*self.data.heap).as_mut_ptr()
            };
            // Move the victim out, then close the gap with an overlapping copy.
            let val = ptr::read(ptr.add(index));
            ptr::copy(ptr.add(index + 1), ptr.add(index), self.len - index - 1);
            self.len -= 1;
            if !self.on_stack {
                (*self.data.heap).set_len(self.len);
            }
            val
        }
    }
326
327 pub fn swap_remove(&mut self, index: usize) -> T {
335 assert!(index < self.len);
336 unsafe {
337 let ptr = if self.on_stack {
338 (*self.data.stack).as_mut_ptr() as *mut T
339 } else {
340 (*self.data.heap).as_mut_ptr()
341 };
342 let val = ptr::read(ptr.add(index));
343 let last = ptr::read(ptr.add(self.len - 1));
344 if index < self.len - 1 {
345 ptr::write(ptr.add(index), last);
346 }
347 self.len -= 1;
348 if !self.on_stack {
349 (*self.data.heap).set_len(self.len);
350 }
351 val
352 }
353 }
354
    /// Shortens the vector to `len` elements, dropping the removed tail in
    /// place. No-op when `len >= self.len`; capacity is unchanged.
    pub fn truncate(&mut self, len: usize) {
        if len < self.len {
            unsafe {
                let ptr = if self.on_stack {
                    (*self.data.stack).as_mut_ptr() as *mut T
                } else {
                    (*self.data.heap).as_mut_ptr()
                };
                // Drop each removed element where it sits.
                for i in len..self.len {
                    ptr::drop_in_place(ptr.add(i));
                }
            }
            self.len = len;
            if !self.on_stack {
                // Elements were dropped above; set_len only fixes the Vec's
                // bookkeeping so its Drop won't drop them a second time.
                unsafe { (*self.data.heap).set_len(len) };
            }
        }
    }
374
375 pub fn clear(&mut self) {
377 self.truncate(0);
378 }
379
    /// Keeps only the elements for which `f` returns `true`, preserving the
    /// relative order of the survivors (single pass, in place).
    ///
    /// NOTE(review): if `f` panics mid-scan, `self.len` has not yet been
    /// reduced, so already-dropped slots are still inside `len` and would be
    /// dropped again by Drop — confirm whether a panic guard is needed.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let len = self.len;
        // Count of rejected elements so far; survivors shift left by `del`.
        let mut del = 0;
        unsafe {
            let ptr = if self.on_stack {
                (*self.data.stack).as_mut_ptr() as *mut T
            } else {
                (*self.data.heap).as_mut_ptr()
            };

            for i in 0..len {
                if !f(&*ptr.add(i)) {
                    // Rejected: drop in place and widen the gap.
                    ptr::drop_in_place(ptr.add(i));
                    del += 1;
                } else if del > 0 {
                    // Survivor: move it left over the gap.
                    let val = ptr::read(ptr.add(i));
                    ptr::write(ptr.add(i - del), val);
                }
            }
            self.len -= del;
            if !self.on_stack {
                (*self.data.heap).set_len(self.len);
            }
        }
    }
412
413 pub fn shrink_to_fit(&mut self) {
415 if !self.on_stack {
416 unsafe {
417 (*self.data.heap).shrink_to_fit();
418 self.capacity = (*self.data.heap).capacity();
419 }
420 }
421 }
422
423 pub fn into_vec(self) -> std::vec::Vec<T>
425 where
426 T: Clone,
427 {
428 let this = ManuallyDrop::new(self);
429 unsafe {
430 if this.on_stack {
431 let ptr = (*this.data.stack).as_ptr() as *const T;
432 slice::from_raw_parts(ptr, this.len).to_vec()
433 } else {
434 ptr::read(&*this.data.heap)
435 }
436 }
437 }
438
439 #[inline(never)]
440 unsafe fn spill_to_heap(&mut self) {
441 unsafe {
442 let mut heap_vec = std::vec::Vec::with_capacity(self.capacity * 2);
443 let ptr = (*self.data.stack).as_ptr() as *const T;
444 for i in 0..self.len {
445 heap_vec.push(ptr::read(ptr.add(i)));
446 }
447 ptr::write(&mut self.data.heap, ManuallyDrop::new(heap_vec));
448 self.on_stack = false;
449 self.capacity = (*self.data.heap).capacity();
450 }
451 }
452
    /// Borrows the initialized elements as a slice.
    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        unsafe {
            // The active union field is selected by `on_stack`; the first
            // `len` slots of either store are always initialized.
            let ptr = if self.on_stack {
                (*self.data.stack).as_ptr() as *const T
            } else {
                (*self.data.heap).as_ptr()
            };
            slice::from_raw_parts(ptr, self.len)
        }
    }
465
    /// Borrows the initialized elements as a mutable slice.
    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe {
            // Same pointer selection as `as_slice`, but mutable.
            let ptr = if self.on_stack {
                (*self.data.stack).as_mut_ptr() as *mut T
            } else {
                (*self.data.heap).as_mut_ptr()
            };
            slice::from_raw_parts_mut(ptr, self.len)
        }
    }
478}
479
480impl<T: Clone, const N: usize> SmallVec<T, N> {
481 pub fn resize(&mut self, new_len: usize, value: T) {
483 let len = self.len;
484 if new_len > len {
485 self.reserve(new_len - len);
486 for _ in len..new_len {
487 self.push(value.clone());
488 }
489 } else {
490 self.truncate(new_len);
491 }
492 }
493
494 pub fn extend_from_slice(&mut self, other: &[T]) {
496 self.reserve(other.len());
497 for item in other {
498 self.push(item.clone());
499 }
500 }
501}
502
503impl<T, const N: usize> Deref for SmallVec<T, N> {
504 type Target = [T];
505 fn deref(&self) -> &Self::Target {
506 self.as_slice()
507 }
508}
509
510impl<T, const N: usize> DerefMut for SmallVec<T, N> {
511 fn deref_mut(&mut self) -> &mut Self::Target {
512 self.as_mut_slice()
513 }
514}
515
impl<T, const N: usize> Drop for SmallVec<T, N> {
    fn drop(&mut self) {
        if self.on_stack {
            // Inline storage: the buffer itself needs no deallocation, but
            // each of the `len` initialized elements must be dropped in place.
            unsafe {
                let ptr = (*self.data.stack).as_mut_ptr() as *mut T;
                for i in 0..self.len {
                    ptr::drop_in_place(ptr.add(i));
                }
            }
        } else {
            // Heap storage: dropping the inner Vec drops its elements and
            // frees the allocation in one step.
            unsafe {
                ManuallyDrop::drop(&mut self.data.heap);
            }
        }
    }
}
532
impl<T: Clone, const N: usize> Clone for SmallVec<T, N> {
    fn clone(&self) -> Self {
        if self.on_stack {
            // Clone element-by-element into a fresh inline buffer.
            // SAFETY: an array of `MaybeUninit<T>` needs no initialization.
            let mut stack_arr: [MaybeUninit<T>; N] = unsafe { MaybeUninit::uninit().assume_init() };
            let src = self.as_slice();
            for i in 0..self.len {
                stack_arr[i] = MaybeUninit::new(src[i].clone());
            }
            // NOTE(review): if a clone above panics, already-cloned elements
            // leak (they are never dropped) — no UB, but confirm acceptable.
            Self {
                len: self.len,
                capacity: N,
                on_stack: true,
                data: VecData {
                    stack: ManuallyDrop::new(stack_arr),
                },
            }
        } else {
            // Heap case: delegate to Vec::clone.
            let heap_vec = unsafe { (*self.data.heap).clone() };
            Self {
                len: self.len,
                capacity: heap_vec.capacity(),
                on_stack: false,
                data: VecData {
                    heap: ManuallyDrop::new(heap_vec),
                },
            }
        }
    }
}
562
563impl<T: fmt::Debug, const N: usize> fmt::Debug for SmallVec<T, N> {
564 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
565 fmt::Debug::fmt(&**self, f)
566 }
567}
568
569impl<T, const N: usize> Default for SmallVec<T, N> {
570 fn default() -> Self {
571 Self::new()
572 }
573}
574
575impl<T: PartialEq, const N: usize> PartialEq for SmallVec<T, N> {
576 fn eq(&self, other: &Self) -> bool {
577 self.as_slice() == other.as_slice()
578 }
579}
580impl<T: Eq, const N: usize> Eq for SmallVec<T, N> {}
581
582impl<T, const N: usize> Extend<T> for SmallVec<T, N> {
583 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
584 for i in iter {
585 self.push(i);
586 }
587 }
588}
589
590impl<T, const N: usize> FromIterator<T> for SmallVec<T, N> {
591 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
592 let mut vec = SmallVec::new();
593 vec.extend(iter);
594 vec
595 }
596}
597
/// Owning iterator returned by [`SmallVec`]'s `IntoIterator` impl.
pub struct SmallVecIntoIter<T, const N: usize> {
    // Dispatches on where the elements lived when the vector was consumed.
    iter: SmallVecIterEnum<T, N>,
}

enum SmallVecIterEnum<T, const N: usize> {
    /// Elements moved out of the inline buffer; slots `[pos, len)` are still
    /// live and owned by the iterator.
    Stack {
        data: [MaybeUninit<T>; N],
        pos: usize,
        len: usize,
    },
    /// Elements handed off to `Vec`'s own consuming iterator.
    Heap(std::vec::IntoIter<T>),
}
613
impl<T, const N: usize> IntoIterator for SmallVec<T, N> {
    type Item = T;
    type IntoIter = SmallVecIntoIter<T, N>;

    /// Consumes the vector into an owning iterator.
    fn into_iter(self) -> Self::IntoIter {
        // Suppress SmallVec::drop — element ownership moves into the iterator,
        // which drops whatever it does not yield.
        let this = ManuallyDrop::new(self);
        unsafe {
            if this.on_stack {
                SmallVecIntoIter {
                    iter: SmallVecIterEnum::Stack {
                        // Move the entire inline buffer out by value.
                        data: ptr::read(&*this.data.stack),
                        pos: 0,
                        len: this.len,
                    },
                }
            } else {
                // Move the heap Vec out of the union and defer to its iterator.
                let heap_vec = ptr::read(&*this.data.heap);
                SmallVecIntoIter {
                    iter: SmallVecIterEnum::Heap(heap_vec.into_iter()),
                }
            }
        }
    }
}
638
impl<T, const N: usize> Iterator for SmallVecIntoIter<T, N> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        match &mut self.iter {
            SmallVecIterEnum::Stack { data, pos, len } => {
                if *pos < *len {
                    // Move the next element out; advancing `pos` marks it
                    // consumed so the iterator's Drop won't touch it again.
                    let val = unsafe { ptr::read(data[*pos].as_ptr()) };
                    *pos += 1;
                    Some(val)
                } else {
                    None
                }
            }
            SmallVecIterEnum::Heap(iter) => iter.next(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match &self.iter {
            SmallVecIterEnum::Stack { pos, len, .. } => {
                // Exact count of elements not yet yielded.
                let remaining = len - pos;
                (remaining, Some(remaining))
            }
            SmallVecIterEnum::Heap(iter) => iter.size_hint(),
        }
    }
}
666
// `size_hint` is exact for both variants, so the default `len()` is correct.
impl<T, const N: usize> ExactSizeIterator for SmallVecIntoIter<T, N> {}
668
impl<T, const N: usize> Drop for SmallVecIntoIter<T, N> {
    fn drop(&mut self) {
        // Stack variant: drop only the not-yet-yielded elements [pos, len).
        // The Heap variant's inner IntoIter cleans up after itself.
        if let SmallVecIterEnum::Stack { data, pos, len } = &mut self.iter {
            for i in *pos..*len {
                unsafe {
                    ptr::drop_in_place(data[i].as_mut_ptr());
                }
            }
        }
    }
}
680
681impl<T: Hash, const N: usize> Hash for SmallVec<T, N> {
682 fn hash<H: Hasher>(&self, state: &mut H) {
683 self.as_slice().hash(state);
684 }
685}
686
687impl<T: PartialOrd, const N: usize> PartialOrd for SmallVec<T, N> {
688 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
689 self.as_slice().partial_cmp(other.as_slice())
690 }
691}
692
693impl<T: Ord, const N: usize> Ord for SmallVec<T, N> {
694 fn cmp(&self, other: &Self) -> Ordering {
695 self.as_slice().cmp(other.as_slice())
696 }
697}
698
699impl<T, const N: usize> AsRef<[T]> for SmallVec<T, N> {
700 fn as_ref(&self) -> &[T] {
701 self.as_slice()
702 }
703}
704
705impl<T, const N: usize> std::borrow::Borrow<[T]> for SmallVec<T, N> {
706 fn borrow(&self) -> &[T] {
707 self.as_slice()
708 }
709}
710
711impl<T, const N: usize> std::borrow::BorrowMut<[T]> for SmallVec<T, N> {
712 fn borrow_mut(&mut self) -> &mut [T] {
713 self.as_mut_slice()
714 }
715}
716
717impl<T, const N: usize> AsMut<[T]> for SmallVec<T, N> {
718 fn as_mut(&mut self) -> &mut [T] {
719 self.as_mut_slice()
720 }
721}
722
723impl<T, const N: usize> AsRef<SmallVec<T, N>> for SmallVec<T, N> {
724 fn as_ref(&self) -> &SmallVec<T, N> {
725 self
726 }
727}
728
729impl<T, const N: usize> core::ops::Index<usize> for SmallVec<T, N> {
730 type Output = T;
731 #[inline(always)]
732 fn index(&self, index: usize) -> &Self::Output {
733 self.get(index).expect("index out of bounds")
734 }
735}
736
737impl<T, const N: usize> core::ops::IndexMut<usize> for SmallVec<T, N> {
738 #[inline(always)]
739 fn index_mut(&mut self, index: usize) -> &mut Self::Output {
740 self.get_mut(index).expect("index out of bounds")
741 }
742}
743
744impl<T, const N: usize> core::ops::Index<core::ops::RangeFull> for SmallVec<T, N> {
745 type Output = [T];
746 #[inline(always)]
747 fn index(&self, _: core::ops::RangeFull) -> &Self::Output {
748 self.as_slice()
749 }
750}
751
752impl<T, const N: usize> core::ops::IndexMut<core::ops::RangeFull> for SmallVec<T, N> {
753 #[inline(always)]
754 fn index_mut(&mut self, _: core::ops::RangeFull) -> &mut Self::Output {
755 self.as_mut_slice()
756 }
757}
758
759impl<T, const N: usize> SmallVec<T, N> {
760 pub fn extend_from_any<V: AnyVec<T> + ?Sized>(&mut self, other: &V)
762 where
763 T: Clone,
764 {
765 self.extend_from_slice(other.as_slice());
766 }
767
768 pub fn eq_any<V: AnyVec<T> + ?Sized>(&self, other: &V) -> bool
770 where
771 T: PartialEq,
772 {
773 self.as_slice() == other.as_slice()
774 }
775
776 pub fn cmp_any<V: AnyVec<T> + ?Sized>(&self, other: &V) -> Ordering
778 where
779 T: Ord,
780 {
781 self.as_slice().cmp(other.as_slice())
782 }
783
784 pub fn starts_with_any<V: AnyVec<T> + ?Sized>(&self, other: &V) -> bool
786 where
787 T: PartialEq,
788 {
789 self.as_slice().starts_with(other.as_slice())
790 }
791
792 pub fn ends_with_any<V: AnyVec<T> + ?Sized>(&self, other: &V) -> bool
794 where
795 T: PartialEq,
796 {
797 self.as_slice().ends_with(other.as_slice())
798 }
799
800 pub fn contains_subsequence<V: AnyVec<T> + ?Sized>(&self, other: &V) -> bool
802 where
803 T: PartialEq,
804 {
805 let other_slice = other.as_slice();
806 if other_slice.is_empty() {
807 return true;
808 }
809 if other_slice.len() > self.len {
810 return false;
811 }
812 self.as_slice()
813 .windows(other_slice.len())
814 .any(|w| w == other_slice)
815 }
816}
817
#[cfg(test)]
mod vec_basic_tests {
    use super::*;

    // Borrow/BorrowMut expose the slice view for reads and writes.
    #[test]
    fn test_vec_traits_borrow() {
        use std::borrow::{Borrow, BorrowMut};
        let mut v: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3]);

        let b: &[i32] = v.borrow();
        assert_eq!(b, &[1, 2, 3]);

        let b_mut: &mut [i32] = v.borrow_mut();
        b_mut[0] = 10;
        assert_eq!(v.as_slice(), &[10, 2, 3]);
    }

    // Within N, everything stays inline.
    #[test]
    fn test_vec_stack_push_pop_basic() {
        let mut vec: SmallVec<i32, 4> = SmallVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert!(vec.is_on_stack());
        assert_eq!(vec.len(), 3);
        assert_eq!(vec[0], 1);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.len(), 2);
    }

    // Pushing past N must spill to the heap and preserve contents.
    #[test]
    fn test_vec_spill_trigger_on_push() {
        let mut vec: SmallVec<i32, 2> = SmallVec::new();
        vec.push(1);
        vec.push(2);
        assert!(vec.is_on_stack());
        vec.push(3);
        assert!(!vec.is_on_stack());
        assert_eq!(vec.len(), 3);
        assert_eq!(vec[0], 1);
        assert_eq!(vec[2], 3);
        assert!(vec.capacity() >= 4);
    }

    // Inserting into a full inline buffer also spills.
    #[test]
    fn test_vec_spill_trigger_on_insert() {
        let mut vec: SmallVec<i32, 2> = SmallVec::new();
        vec.push(1);
        vec.push(3);
        vec.insert(1, 2);
        assert!(!vec.is_on_stack());
        assert_eq!(vec.as_slice(), &[1, 2, 3]);
    }

    // insert/remove/swap_remove on inline storage.
    #[test]
    fn test_vec_stack_insert_remove_swap() {
        let mut vec: SmallVec<i32, 4> = SmallVec::from_iter([10, 20, 30]);
        vec.insert(1, 15);
        assert_eq!(vec[1], 15);
        let removed = vec.remove(2);
        assert_eq!(removed, 20);
        assert_eq!(vec.as_slice(), &[10, 15, 30]);
        vec.push(40);
        let swapped = vec.swap_remove(0);
        assert_eq!(swapped, 10);
        assert_eq!(vec.as_slice(), &[40, 15, 30]);
    }

    // retain works identically on heap-backed and stack-backed storage.
    #[test]
    fn test_vec_any_storage_retain() {
        let mut vec: SmallVec<i32, 8> = SmallVec::from_iter(0..10);
        assert!(!vec.is_on_stack());
        vec.retain(|&x| x % 2 == 0);
        assert_eq!(vec.as_slice(), &[0, 2, 4, 6, 8]);
        let mut vec_stack: SmallVec<i32, 8> = SmallVec::from_iter(0..6);
        assert!(vec_stack.is_on_stack());
        vec_stack.retain(|&x| x % 2 != 0);
        assert_eq!(vec_stack.as_slice(), &[1, 3, 5]);
    }

    // resize can grow past N (spilling) and clone preserves contents.
    #[test]
    fn test_vec_any_storage_resize_clone() {
        let mut vec: SmallVec<i32, 4> = SmallVec::new();
        vec.resize(2, 0);
        assert!(vec.is_on_stack());
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
        vec.resize(10, 5);
        assert!(!vec.is_on_stack());
        assert_eq!(vec.len(), 10);
        assert_eq!(vec[9], 5);
    }

    #[test]
    fn test_vec_traits_into_iter_basic() {
        let vec: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3]);
        let collected: Vec<i32> = vec.into_iter().map(|x| x * 2).collect();
        assert_eq!(collected, vec![2, 4, 6]);
    }

    // Every element must be dropped exactly once, inline or spilled.
    #[test]
    fn test_vec_any_storage_drop_behavior() {
        use std::cell::RefCell;
        use std::rc::Rc;
        let counter = Rc::new(RefCell::new(0));
        struct Dropper(Rc<RefCell<i32>>);
        impl Drop for Dropper {
            fn drop(&mut self) {
                *self.0.borrow_mut() += 1;
            }
        }
        impl Clone for Dropper {
            fn clone(&self) -> Self {
                Dropper(self.0.clone())
            }
        }
        {
            let mut vec: SmallVec<Dropper, 2> = SmallVec::new();
            vec.push(Dropper(counter.clone()));
            vec.push(Dropper(counter.clone()));
        }
        assert_eq!(*counter.borrow(), 2);
        *counter.borrow_mut() = 0;
        {
            // Third push spills; drops must still be counted exactly once each.
            let mut vec: SmallVec<Dropper, 2> = SmallVec::new();
            vec.push(Dropper(counter.clone()));
            vec.push(Dropper(counter.clone()));
            vec.push(Dropper(counter.clone()));
        }
        assert_eq!(*counter.borrow(), 3);
    }

    #[test]
    fn test_vec_traits_anyvec_inspection() {
        let sv: SmallVec<i32, 4> = SmallVec::from_iter([10, 20, 30]);
        assert_eq!(sv.len(), 3);
        assert!(!sv.is_empty());
        assert_eq!(sv.as_slice().first(), Some(&10));
        assert_eq!(sv.get(1), Some(&20));
        let sum: i32 = sv.iter().sum();
        assert_eq!(sum, 60);
    }

    // *_any helpers compare against std Vec and arrays alike.
    #[test]
    fn test_vec_traits_interop_comparison() {
        let sv: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3]);
        let std_vec = vec![1, 2, 3];
        assert!(sv.eq_any(&std_vec));
        assert_eq!(sv.cmp_any(&std_vec), std::cmp::Ordering::Equal);
        let arr = [1, 2, 4];
        assert!(!sv.eq_any(&arr));
        assert_eq!(sv.cmp_any(&arr), std::cmp::Ordering::Less);
    }

    #[test]
    fn test_vec_traits_interop_searching() {
        let sv: SmallVec<i32, 8> = SmallVec::from_iter([1, 2, 3, 4, 5]);
        assert!(sv.starts_with_any(&vec![1, 2]));
        assert!(sv.ends_with_any(&[4, 5]));
        assert!(sv.contains_subsequence(&vec![3, 4]));
        assert!(!sv.contains_subsequence(&[3, 5]));
    }

    // with_capacity picks inline vs heap based on the request vs N.
    #[test]
    fn test_vec_any_storage_with_capacity() {
        let v: SmallVec<i32, 4> = SmallVec::with_capacity(2);
        assert!(v.is_on_stack());
        let v2: SmallVec<i32, 4> = SmallVec::with_capacity(10);
        assert!(!v2.is_on_stack());
        assert!(v2.capacity() >= 10);
    }

    // shrink_to_fit only affects heap storage; inline capacity stays N.
    #[test]
    fn test_vec_any_storage_shrink_to_fit() {
        let mut v: SmallVec<i32, 4> = SmallVec::with_capacity(10);
        v.push(1);
        v.shrink_to_fit();
        assert!(v.capacity() >= 1);
        let mut v_stack: SmallVec<i32, 4> = SmallVec::new();
        v_stack.push(1);
        v_stack.shrink_to_fit();
        assert_eq!(v_stack.capacity(), 4);
    }

    #[test]
    fn test_vec_any_storage_extend_from_slice() {
        let mut v: SmallVec<i32, 4> = SmallVec::new();
        v.extend_from_slice(&[1, 2]);
        assert!(v.is_on_stack());
        assert_eq!(v.as_slice(), &[1, 2]);
        v.extend_from_slice(&[3, 4, 5]);
        assert!(!v.is_on_stack());
        assert_eq!(v.as_slice(), &[1, 2, 3, 4, 5]);
    }

    #[test]
    fn test_vec_any_storage_clear_and_empty() {
        let mut v: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3, 4, 5]);
        assert!(!v.is_on_stack());
        v.clear();
        assert!(v.is_empty());
        let mut v_stack: SmallVec<i32, 4> = SmallVec::from_iter([1, 2]);
        v_stack.clear();
        assert!(v_stack.is_empty());
    }

    // Equal contents compare equal even when one copy has spilled.
    #[test]
    fn test_vec_traits_partial_eq_cross_storage() {
        let v1: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3]);
        let v2: SmallVec<i32, 2> = SmallVec::from_iter([1, 2, 3]);
        assert!(v1.is_on_stack());
        assert!(!v2.is_on_stack());
        assert_eq!(&v1[..], &v2[..]);
    }

    #[test]
    fn test_vec_any_storage_as_mut_and_deref_mut() {
        let mut v: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3]);
        v[0] = 10;
        assert_eq!(v[0], 10);
        v.as_mut_slice()[1] = 20;
        assert_eq!(v[1], 20);
    }
}
1044
#[cfg(test)]
mod vec_coverage_tests {
    use super::*;

    // Exercises the AnyVec impls for Vec, arrays, and bare slices.
    #[test]
    fn test_any_vec_trait_std_and_slice_implementations() {
        let v_std = vec![1, 2, 3];
        assert_eq!(AnyVec::len(&v_std), 3);
        assert!(!AnyVec::is_empty(&v_std));
        assert_eq!(AnyVec::get(&v_std, 1), Some(&2));
        assert!(v_std.contains(&2));

        let arr = [1, 2, 3];
        assert_eq!(AnyVec::len(&arr), 3);

        let slice: &[i32] = &[1, 2];
        assert_eq!(AnyVec::len(slice), 2);
    }

    // Out-of-bounds access returns None before and after a spill.
    #[test]
    fn test_small_vec_empty_and_oob_access() {
        let mut v: SmallVec<i32, 2> = SmallVec::new();
        assert_eq!(v.get(0), None);
        assert_eq!(v.get_mut(0), None);
        assert_eq!(v.pop(), None);

        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(v.get(3), None);
        assert_eq!(v.get_mut(3), None);
    }

    // insert/remove/swap_remove on heap-backed (spilled) storage.
    #[test]
    fn test_small_vec_heap_insertion_removal_and_swap() {
        let mut v: SmallVec<i32, 2> = SmallVec::new();
        v.extend([1, 2, 3]);
        v.insert(1, 10);
        assert_eq!(v.as_slice(), &[1, 10, 2, 3]);

        assert_eq!(v.remove(1), 10);
        assert_eq!(v.as_slice(), &[1, 2, 3]);

        assert_eq!(v.swap_remove(0), 1);
        assert_eq!(v.as_slice(), &[3, 2]);
    }

    // into_vec from both inline and spilled storage.
    #[test]
    fn test_small_vec_conversion_to_std_vec() {
        let v: SmallVec<i32, 4> = SmallVec::from_iter([1, 2]);
        let v_std = v.into_vec();
        assert_eq!(v_std, vec![1, 2]);

        let v2: SmallVec<i32, 2> = SmallVec::from_iter([1, 2, 3]);
        let v2_std = v2.into_vec();
        assert_eq!(v2_std, vec![1, 2, 3]);
    }

    #[test]
    fn test_small_vec_resizing_and_truncation() {
        let mut v: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3, 4]);
        v.resize(2, 0);
        assert_eq!(v.len(), 2);
    }

    #[test]
    fn test_small_vec_hashing_comparison_and_range_indexing() {
        let mut v: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3]);
        let mut v2 = v.clone();
        assert_eq!(v, v2);

        v2.push(4);
        assert!(v < v2);
        assert_eq!(v.cmp(&v2), Ordering::Less);

        let mut h = std::collections::hash_map::DefaultHasher::new();
        v.hash(&mut h);

        assert_eq!(&v[..], &[1, 2, 3]);
        // Exercises IndexMut<RangeFull> + AsMut on the resulting slice.
        v[..].as_mut();
    }

    #[test]
    fn test_small_vec_any_vec_subsequence_and_prefix_checks() {
        let v: SmallVec<i32, 4> = SmallVec::from_iter([1, 2, 3, 4]);
        let other: SmallVec<i32, 4> = SmallVec::from_iter([1, 2]);

        assert!(v.eq_any(&v));
        assert_eq!(v.cmp_any(&other), Ordering::Greater);
        assert!(v.starts_with_any(&other));
        assert!(v.ends_with_any(&SmallVec::<i32, 4>::from_iter([3, 4])));
        assert!(v.contains_subsequence(&SmallVec::<i32, 4>::from_iter([2, 3])));
        assert!(!v.contains_subsequence(&SmallVec::<i32, 4>::from_iter([5])));
        // Empty needle is always contained; needle longer than haystack never is.
        assert!(v.contains_subsequence(&[]));
        assert!(!SmallVec::<i32, 4>::from_iter([1]).contains_subsequence(&[1, 2]));
    }

    // Heap-backed iterator reports an exact size_hint.
    #[test]
    fn test_small_vec_heap_into_iterator_size_hint() {
        let v: SmallVec<i32, 2> = SmallVec::from_iter([1, 2, 3]);
        let it = v.into_iter();
        assert_eq!(it.size_hint().0, 3);
        let collected: Vec<_> = it.collect();
        assert_eq!(collected, vec![1, 2, 3]);
    }
}