#![allow(incomplete_features)]
#![cfg_attr(feature = "default-size", feature(generic_const_exprs))]
#![cfg_attr(feature = "use-specialization", feature(min_specialization))]

#![no_std]

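//! A `Vec`-like container that keeps up to `N` elements inline and only
//! spills to the heap once that inline capacity is exceeded.
//!
//! A minimal usage sketch (the crate name `tinyvec` is assumed here):
//!
//! ```
//! # use tinyvec::TinyVec; // crate name assumed for illustration
//! let mut v: TinyVec<u32, 4> = TinyVec::new();
//! v.push(1);
//! assert!(v.lives_on_stack());
//! ```
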
extern crate alloc;
use alloc::vec::Vec;
use alloc::boxed::Box;

use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{fmt, ptr};
use core::slice;

mod raw;
use raw::RawVec;

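// Storage for `TinyVec`: either `N` inline slots or a heap `RawVec`.
// Which field is live is tracked by the tag bit in `Length` below.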
union TinyVecInner<T, const N: usize> {
    stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    raw: RawVec<T>,
}

impl<T, const N: usize> TinyVecInner<T, N> {

    #[inline(always)]
    const unsafe fn as_ptr_stack(&self) -> *const T {
        unsafe { &self.stack as *const _ as *const T }
    }

    #[inline(always)]
    const unsafe fn as_ptr_stack_mut(&mut self) -> *mut T {
        unsafe { &mut self.stack as *mut _ as *mut T }
    }

    #[inline(always)]
    const unsafe fn as_ptr_heap(&self) -> *const T {
        unsafe { self.raw.ptr.as_ptr() }
    }

    #[inline(always)]
    const unsafe fn as_ptr_heap_mut(&mut self) -> *mut T {
        unsafe { self.raw.ptr.as_ptr() }
    }
}

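/// A `usize` length with the storage tag packed into the low bit:
/// `0` means inline ("stack"), `1` means heap. The real length lives in
/// the upper bits, so e.g. `Length::new_heap(3)` stores `3 << 1 | 1 = 7`.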
#[repr(transparent)]
struct Length(usize);

impl Length {
    #[inline(always)]
    const fn new_stack(len: usize) -> Self {
        Self(len << 1)
    }

    #[inline(always)]
    const fn new_heap(len: usize) -> Self {
        Self(len << 1 | 0b1)
    }

    #[inline(always)]
    const fn is_stack(&self) -> bool {
        (self.0 & 0b1) == 0
    }

    #[inline(always)]
    const fn set_heap(&mut self) {
        self.0 |= 0b1;
    }

    #[inline(always)]
    const fn set_stack(&mut self) {
        // Clear only the tag bit; `&= 0b0` would wipe the length as well.
        self.0 &= !0b1;
    }

    #[inline(always)]
    const fn set(&mut self, n: usize) {
        self.0 &= 0b1;
        self.0 |= n << 1;
    }

    #[inline(always)]
    const fn get(&self) -> usize {
        self.0 >> 1
    }

    #[inline(always)]
    const fn add(&mut self, n: usize) {
        self.0 += n << 1;
    }

    #[inline(always)]
    const fn sub(&mut self, n: usize) {
        self.0 -= n << 1;
    }
}

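/// Creates a [`TinyVec`] with `vec!`-like syntax.
///
/// A sketch of both forms (the crate name `tinyvec` is assumed here):
///
/// ```
/// # use tinyvec::{tinyvec, TinyVec}; // crate name assumed
/// let a: TinyVec<i32, 4> = tinyvec![1, 2, 3];
/// let b: TinyVec<i32, 4> = tinyvec![0; 4];
/// assert_eq!(a.len(), 3);
/// assert_eq!(b.len(), 4);
/// ```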
#[macro_export]
macro_rules! tinyvec {
    ($elem:expr; $n:expr) => ({
        let mut tv = $crate::TinyVec::new();
        tv.resize($n, $elem);
        tv
    });
    ($($x:expr),*$(,)?) => ({
        $crate::TinyVec::from(&[ $( $x ,)* ])
    });
}

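/// The number of `T` values that fit in the footprint of `RawVec<T>` (a
/// capacity/pointer pair), i.e. the largest inline capacity that does not
/// make `TinyVec` bigger than its heap bookkeeping. On a 64-bit target
/// this is typically `16 / size_of::<T>()`.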
pub const fn n_elements_for_stack<T>() -> usize {
    let elem = mem::size_of::<T>();
    // Guard against zero-sized types, which would otherwise divide by
    // zero here; inline capacity 0 simply means "always use the heap path".
    if elem == 0 {
        0
    } else {
        mem::size_of::<RawVec<T>>() / elem
    }
}

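/// A contiguous growable array that stores up to `N` elements inline and
/// moves them to the heap once it grows past `N`.
///
/// The low bit of the stored length tags which representation is live, so
/// the maximum length is `usize::MAX >> 1`. With the `default-size`
/// feature enabled, `N` defaults to `n_elements_for_stack::<T>()`.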
pub struct TinyVec<T,
    #[cfg(not(feature = "default-size"))]
    const N: usize,

    #[cfg(feature = "default-size")]
    const N: usize = { n_elements_for_stack::<T>() },
> {
    inner: TinyVecInner<T, N>,
    len: Length,
}

impl<T, const N: usize> TinyVec<T, N> {

    unsafe fn switch_to_heap(&mut self, n: usize) {
        let mut vec = RawVec::new();
        vec.expand_if_needed(0, self.len.get() + n);
        unsafe {
            let src = self.inner.as_ptr_stack();
            let dst = vec.ptr.as_ptr();
            ptr::copy(src, dst, self.len.get());
            self.inner.raw = vec;
        }
        self.len.set_heap();
    }

    unsafe fn switch_to_stack(&mut self) {
        let mut rv = unsafe { self.inner.raw };

        let mut stack = [const { MaybeUninit::uninit() }; N];

        unsafe {
            let src = rv.ptr.as_ptr();
            // The destination must come from a `mut` binding: writing
            // through a pointer cast from an immutable array would be UB.
            let dst = stack.as_mut_ptr() as *mut T;
            ptr::copy(src, dst, self.len.get());

            rv.destroy();
        }

        self.inner.stack = ManuallyDrop::new(stack);
        self.len.set_stack();
    }
}

impl<T, const N: usize> TinyVec<T, N> {

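    /// Creates an empty `TinyVec` stored entirely inline.
    ///
    /// A minimal sketch (the crate name `tinyvec` is assumed here):
    ///
    /// ```
    /// # use tinyvec::TinyVec; // crate name assumed
    /// let v: TinyVec<u8, 8> = TinyVec::new();
    /// assert!(v.is_empty() && v.lives_on_stack());
    /// assert_eq!(v.capacity(), 8);
    /// ```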
    pub const fn new() -> Self {
        let stack = [const { MaybeUninit::uninit() }; N];
        Self {
            inner: TinyVecInner { stack: ManuallyDrop::new(stack) },
            len: Length::new_stack(0),
        }
    }

    pub fn with_capacity(cap: usize) -> Self {
        let mut len = Length(0);
        let inner = if cap <= N {
            let s = [const { MaybeUninit::uninit() }; N];
            TinyVecInner { stack: ManuallyDrop::new(s) }
        } else {
            len.set_heap();
            TinyVecInner { raw: RawVec::with_capacity(cap) }
        };

        Self { inner, len }
    }

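    /// Builds a `TinyVec` from an array of any length `M`, spilling to
    /// the heap when `M > N`; the elements are moved, never cloned.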
    pub fn from_array<const M: usize>(arr: [T; M]) -> Self {
        let arr = ManuallyDrop::new(arr);
        let mut tv = Self::with_capacity(M);

        let src = arr.as_ptr();
        let dst = tv.as_mut_ptr();

        unsafe {
            ptr::copy(src, dst, M);
            tv.set_len(M);
        }

        tv
    }

    pub const fn from_array_eq_size(arr: [T; N]) -> Self {
        let mut tv = Self::new();

        let src = arr.as_ptr();
        let dst = tv.as_mut_ptr();

        unsafe {
            ptr::copy(src, dst, N);
            tv.set_len(N);
        }

        mem::forget(arr);
        tv
    }

    pub fn from_vec(mut vec: Vec<T>) -> Self {
        let mut tv = Self::with_capacity(vec.len());
        let dst = tv.as_mut_ptr();
        let src = vec.as_ptr();
        unsafe {
            ptr::copy(src, dst, vec.len());
            vec.set_len(0);
        }
        tv
    }

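    /// Takes over the `Vec`'s existing allocation instead of copying, so
    /// the result always lives on the heap, even when `vec.len() <= N`.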
    pub fn from_vec_reuse_buffer(vec: Vec<T>) -> Self {
        let mut vec = ManuallyDrop::new(vec);

        let ptr = vec.as_mut_ptr();
        let ptr = unsafe { NonNull::new_unchecked(ptr) };
        let len = Length::new_heap(vec.len());
        let cap = vec.capacity();

        let raw = RawVec { cap, ptr };

        let inner = TinyVecInner { raw };
        Self { inner, len }
    }

    pub fn from_tiny_vec<const M: usize>(mut vec: TinyVec<T, M>) -> Self {
        let len = vec.len();
        let mut tv = Self::with_capacity(len);

        let src = vec.as_ptr();
        let dst = tv.as_mut_ptr();

        unsafe {
            ptr::copy(src, dst, len);
            vec.set_len(0);
            tv.set_len(len);
        }

        tv
    }

    pub fn from_slice(slice: &[T]) -> Self
    where
        T: Clone,
    {
        let mut v = Self::with_capacity(slice.len());
        v.extend_from_slice(slice);
        v
    }

    pub fn from_slice_copied(slice: &[T]) -> Self
    where
        T: Copy,
    {
        let mut v = Self::with_capacity(slice.len());
        v.extend_from_slice_copied(slice);
        v
    }

    #[inline]
    pub const fn len(&self) -> usize { self.len.get() }

    #[inline]
    pub const fn is_empty(&self) -> bool { self.len.get() == 0 }

    #[inline]
    pub const fn capacity(&self) -> usize {
        if self.len.is_stack() {
            N
        } else {
            unsafe { self.inner.raw.cap }
        }
    }

    #[inline]
    pub const fn lives_on_stack(&self) -> bool { self.len.is_stack() }

    #[inline]
    pub const fn as_ptr(&self) -> *const T {
        unsafe {
            if self.len.is_stack() {
                self.inner.as_ptr_stack()
            } else {
                self.inner.as_ptr_heap()
            }
        }
    }

    #[inline]
    pub const fn as_mut_ptr(&mut self) -> *mut T {
        unsafe {
            if self.len.is_stack() {
                self.inner.as_ptr_stack_mut()
            } else {
                self.inner.as_ptr_heap_mut()
            }
        }
    }

    #[inline]
    pub const fn as_non_null(&mut self) -> NonNull<T> {
        unsafe { NonNull::new_unchecked(self.as_mut_ptr()) }
    }

    #[inline]
    pub const fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len.get()) }
    }

    #[inline]
    pub const fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len.get()) }
    }

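    /// Ensures there is room for at least `n` more elements, moving the
    /// contents to the heap if the inline capacity `N` would be exceeded.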
    pub fn reserve(&mut self, n: usize) {
        if self.len.is_stack() {
            if self.len.get() + n > N {
                unsafe { self.switch_to_heap(n); }
            }
        } else {
            unsafe {
                self.inner.raw.expand_if_needed(self.len.get(), n);
            }
        }
    }

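    /// Appends an element, growing (and possibly moving to the heap) as
    /// needed.
    ///
    /// A push/pop round-trip sketch (crate name `tinyvec` assumed):
    ///
    /// ```
    /// # use tinyvec::TinyVec; // crate name assumed
    /// let mut v: TinyVec<i32, 2> = TinyVec::new();
    /// v.push(1);
    /// v.push(2);
    /// v.push(3); // exceeds N = 2, so the contents spill to the heap
    /// assert!(!v.lives_on_stack());
    /// assert_eq!(v.pop(), Some(3));
    /// ```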
    #[inline]
    pub fn push(&mut self, elem: T) {
        self.reserve(1);
        unsafe { self.push_unchecked(elem); }
    }

    pub unsafe fn push_unchecked(&mut self, elem: T) {
        unsafe {
            let dst = self.as_mut_ptr().add(self.len.get());
            dst.write(elem);
        }
        self.len.add(1);
    }

    pub fn push_within_capacity(&mut self, val: T) -> Result<(), T> {
        if self.len.get() < self.capacity() {
            unsafe { self.push_unchecked(val); }
            Ok(())
        } else {
            Err(val)
        }
    }

    pub fn pop(&mut self) -> Option<T> {
        if self.len.get() == 0 {
            None
        } else {
            self.len.sub(1);
            let val = unsafe { self.as_ptr().add(self.len.get()).read() };
            Some(val)
        }
    }

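    /// Inserts `elem` at `index`, shifting everything after it one slot
    /// to the right. Returns `Err(elem)` instead of panicking when
    /// `index` is out of bounds:
    ///
    /// ```
    /// # use tinyvec::TinyVec; // crate name assumed
    /// let mut v: TinyVec<i32, 4> = TinyVec::from(&[1, 3, 4]);
    /// assert!(v.insert(1, 2).is_ok());
    /// assert_eq!(v.as_slice(), &[1, 2, 3, 4]);
    /// ```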
    pub fn insert(&mut self, index: usize, elem: T) -> Result<(), T> {
        if index > self.len.get() { return Err(elem) }
        unsafe { self.insert_unchecked(index, elem); }
        Ok(())
    }

    pub unsafe fn insert_unchecked(&mut self, index: usize, elem: T) {
        self.reserve(1);

        unsafe {
            ptr::copy(
                self.as_ptr().add(index),
                self.as_mut_ptr().add(index + 1),
                self.len.get() - index,
            );
            self.as_mut_ptr().add(index).write(elem);
        }
        self.len.add(1);
    }

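    /// Resizes to exactly `cap` elements, cloning `elem` into any new
    /// slots and truncating if `cap` is smaller. The final slot receives
    /// `elem` itself, so `n` new slots cost `n - 1` clones:
    ///
    /// ```
    /// # use tinyvec::TinyVec; // crate name assumed
    /// let mut v: TinyVec<u8, 4> = TinyVec::new();
    /// v.resize(3, 7);
    /// assert_eq!(v.as_slice(), &[7, 7, 7]);
    /// v.resize(1, 0);
    /// assert_eq!(v.as_slice(), &[7]);
    /// ```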
    pub fn resize(&mut self, cap: usize, elem: T)
    where
        T: Clone,
    {
        if cap < self.len() {
            self.truncate(cap);
        } else {
            let n = cap - self.len();
            self.reserve(n);

            unsafe {
                let mut ptr = self.as_mut_ptr().add(self.len());
                let len = &mut self.len;

                // Clone into all but the last new slot...
                for _ in 1..n {
                    ptr::write(ptr, elem.clone());
                    ptr = ptr.add(1);
                    len.add(1);
                }

                // ...and move `elem` itself into the final one.
                if n > 0 {
                    ptr::write(ptr, elem);
                    len.add(1);
                }
            }
        }
    }

    pub fn resize_with<F>(&mut self, cap: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        if cap < self.len() {
            self.truncate(cap);
        } else {
            let n = cap - self.len();
            self.reserve(n);

            unsafe {
                let mut ptr = self.as_mut_ptr().add(self.len());
                let len = &mut self.len;

                for _ in 0..n {
                    ptr::write(ptr, f());
                    ptr = ptr.add(1);
                    len.add(1);
                }
            }
        }
    }

    pub unsafe fn resize_zeroed(&mut self, cap: usize) {
        if cap < self.len() {
            self.truncate(cap);
        } else {
            let n = cap - self.len();
            self.reserve(n);
            unsafe {
                let ptr = self.as_mut_ptr().add(self.len());
                ptr.write_bytes(0, n);
            }
            self.len.add(n);
        }
    }

    pub fn reserve_exact(&mut self, n: usize) {
        if self.len.is_stack() {
            if self.len.get() + n > N {
                unsafe { self.switch_to_heap(n); }
            }
        } else {
            let vec = unsafe { &mut self.inner.raw };
            let len = self.len.get();
            let new_cap = vec.cap.max(len + n);
            vec.expand_if_needed_exact(len, new_cap);
        }
    }

    pub unsafe fn remove_unchecked(&mut self, index: usize) -> T {
        debug_assert!(
            index < self.len.get(),
            "index {index} is out of bounds (len {}); removing it would be UB",
            self.len.get(),
        );

        unsafe {
            self.len.sub(1);
            let result = self.as_mut_ptr().add(index).read();
            ptr::copy(
                self.as_ptr().add(index + 1),
                self.as_mut_ptr().add(index),
                self.len.get() - index,
            );
            result
        }
    }

    #[inline]
    pub fn remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len.get() { return None }
        Some(unsafe { self.remove_unchecked(index) })
    }

    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.swap_checked(a, b).unwrap_or_else(|n| {
            panic!("Index {n} out of bounds")
        });
    }

    pub const fn swap_checked(&mut self, a: usize, b: usize) -> Result<(), usize> {
        if a >= self.len.get() {
            return Err(a)
        }
        if b >= self.len.get() {
            return Err(b)
        }
        unsafe { self.swap_unchecked(a, b); }
        Ok(())
    }

    pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
        unsafe {
            let ap = self.as_mut_ptr().add(a);
            let bp = self.as_mut_ptr().add(b);
            ptr::swap(ap, bp);
        }
    }

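    /// Removes and returns the element at `index` in O(1) by swapping it
    /// with the last element first, so ordering is not preserved.
    /// Returns `None` when `index` is out of bounds.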
    pub fn swap_remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len.get() {
            None
        } else if index == self.len.get() - 1 {
            self.pop()
        } else {
            unsafe { self.swap_unchecked(index, self.len.get() - 1) }
            self.pop()
        }
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        if self.len.is_stack() { return }

        if self.len.get() <= N {
            unsafe { self.switch_to_stack(); }
        } else {
            unsafe { self.inner.raw.shrink_to_fit(self.len.get()); }
        }
    }

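    /// Sets the length directly without dropping or initializing any
    /// elements.
    ///
    /// # Safety
    /// `len` must not exceed the current capacity, and the first `len`
    /// elements must be initialized.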
    #[inline]
    pub const unsafe fn set_len(&mut self, len: usize) {
        self.len.set(len);
    }

    pub fn truncate(&mut self, len: usize) {
        if len < self.len.get() {
            let old_len = self.len.get();
            unsafe {
                // Shorten first: if a destructor panics, the tail must
                // not be reachable again during unwinding (double drop).
                self.set_len(len);
                let tail = ptr::slice_from_raw_parts_mut(
                    self.as_mut_ptr().add(len),
                    old_len - len,
                );
                ptr::drop_in_place(tail);
            }
        }
    }

    pub fn extend_from_slice(&mut self, s: &[T])
    where
        T: Clone,
    {
        self.extend(s.iter().cloned());
    }

    pub fn extend_from_slice_copied(&mut self, s: &[T])
    where
        T: Copy,
    {
        let len = s.len();
        let src = s.as_ptr();

        self.reserve(len);

        unsafe {
            ptr::copy(src, self.as_mut_ptr().add(self.len.get()), len);
        }

        self.len.add(len);
    }

    pub fn into_boxed_slice(self) -> Box<[T]> {
        let mut vec = ManuallyDrop::new(self);

        if vec.lives_on_stack() {
            unsafe { vec.switch_to_heap(0) };
        }
        debug_assert!(!vec.lives_on_stack());

        let len = vec.len();
        unsafe { vec.inner.raw.shrink_to_fit(len); }
        debug_assert_eq!(len, vec.capacity());

        let ptr = vec.as_mut_ptr();
        unsafe {
            let slice = slice::from_raw_parts_mut(ptr, len);
            Box::from_raw(slice)
        }
    }

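    /// Converts into a `Vec<T>`, handing the heap buffer over without
    /// copying when the contents already live there:
    ///
    /// ```
    /// # use tinyvec::TinyVec; // crate name assumed
    /// let v: TinyVec<i32, 2> = TinyVec::from(&[1, 2, 3]);
    /// assert_eq!(v.into_vec(), vec![1, 2, 3]);
    /// ```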
    pub fn into_vec(self) -> Vec<T> {
        let mut vec = ManuallyDrop::new(self);

        if vec.lives_on_stack() {
            unsafe { vec.switch_to_heap(0) };
        }

        let ptr = vec.as_mut_ptr();
        let len = vec.len();
        let cap = vec.capacity();

        unsafe { Vec::from_raw_parts(ptr, len, cap) }
    }
}

impl<T, const N: usize> Extend<T> for TinyVec<T, N> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iter = iter.into_iter();

        let (min, max) = iter.size_hint();
        let reserve = max.unwrap_or(min);

        if reserve > 0 {
            self.reserve(reserve);
        }

        for elem in iter {
            // `size_hint` is only a hint and must not be trusted for
            // memory safety, so use the checked `push` here.
            self.push(elem);
        }
    }
}

// Wraps items in `default` when specialization is enabled, so the
// `Copy`-specialized impls below can override them.
#[cfg(feature = "use-specialization")]
macro_rules! maybe_default {
    ($($t:tt)*) => {
        default $($t)*
    };
}

#[cfg(not(feature = "use-specialization"))]
macro_rules! maybe_default {
    ($($t:tt)*) => {
        $($t)*
    };
}

impl<T, const N: usize> Default for TinyVec<T, N> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: fmt::Debug, const N: usize> fmt::Debug for TinyVec<T, N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self[..], f)
    }
}

impl<T: PartialEq, const N: usize, S> PartialEq<S> for TinyVec<T, N>
where
    S: AsRef<[T]>,
{
    fn eq(&self, other: &S) -> bool {
        self.as_slice() == other.as_ref()
    }
}

// `Eq` may only be claimed when `T` itself is `Eq`, not merely `PartialEq`.
impl<T: Eq, const N: usize> Eq for TinyVec<T, N> {}

impl<T, const N: usize> Deref for TinyVec<T, N> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T, const N: usize> DerefMut for TinyVec<T, N> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T, const N: usize> Drop for TinyVec<T, N> {
    fn drop(&mut self) {
        if mem::needs_drop::<T>() {
            for e in self.deref_mut() {
                unsafe { ptr::drop_in_place(e) };
            }
        }
        if !self.len.is_stack() {
            unsafe { self.inner.raw.destroy(); }
        }
    }
}

impl<T, const N: usize> From<Vec<T>> for TinyVec<T, N> {
    fn from(value: Vec<T>) -> Self {
        Self::from_vec(value)
    }
}

impl<T, const N: usize> FromIterator<T> for TinyVec<T, N> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let iter = iter.into_iter();
        let cap = match iter.size_hint() {
            (_, Some(max)) => max,
            (n, None) => n,
        };
        let mut vec = Self::with_capacity(cap);
        for elem in iter {
            // As in `extend`, `size_hint` may under-report, so push safely.
            vec.push(elem);
        }
        vec
    }
}

impl<T, const N: usize> From<TinyVec<T, N>> for Vec<T> {
    fn from(value: TinyVec<T, N>) -> Self {
        value.into_vec()
    }
}

impl<T, const N: usize> AsRef<[T]> for TinyVec<T, N> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, const N: usize> AsMut<[T]> for TinyVec<T, N> {
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T: Clone, const N: usize> From<&[T]> for TinyVec<T, N> {
    maybe_default!(
        fn from(value: &[T]) -> Self {
            Self::from_slice(value)
        }
    );
}

#[cfg(feature = "use-specialization")]
impl<T: Clone + Copy, const N: usize> From<&[T]> for TinyVec<T, N> {
    fn from(value: &[T]) -> Self {
        Self::from_slice_copied(value)
    }
}

impl<T: Clone, const N: usize> From<&mut [T]> for TinyVec<T, N> {
    maybe_default!(
        fn from(value: &mut [T]) -> Self {
            Self::from_slice(value)
        }
    );
}

#[cfg(feature = "use-specialization")]
impl<T: Clone + Copy, const N: usize> From<&mut [T]> for TinyVec<T, N> {
    fn from(value: &mut [T]) -> Self {
        Self::from_slice_copied(value)
    }
}

impl<T, const N: usize> From<[T; N]> for TinyVec<T, N> {
    fn from(value: [T; N]) -> Self {
        Self::from_array_eq_size(value)
    }
}

impl<T: Clone, const N: usize, const M: usize> From<&[T; M]> for TinyVec<T, N> {
    maybe_default!(
        fn from(value: &[T; M]) -> Self {
            Self::from_slice(value)
        }
    );
}

#[cfg(feature = "use-specialization")]
impl<T: Clone + Copy, const N: usize, const M: usize> From<&[T; M]> for TinyVec<T, N> {
    fn from(value: &[T; M]) -> Self {
        Self::from_slice_copied(value as &[T])
    }
}

impl<T: Clone, const N: usize> Clone for TinyVec<T, N> {
    maybe_default!(
        fn clone(&self) -> Self {
            Self::from_slice(self.as_slice())
        }
    );
}

#[cfg(feature = "use-specialization")]
impl<T: Clone + Copy, const N: usize> Clone for TinyVec<T, N> {
    fn clone(&self) -> Self {
        Self::from_slice_copied(self.as_slice())
    }
}

#[cfg(test)]
mod test;