1#![allow(incomplete_features)]
62#![cfg_attr(feature = "nightly-const-generics", feature(generic_const_exprs))]
63
64#![no_std]
65
66extern crate alloc;
67use alloc::vec::Vec;
68
69use core::mem::{self, ManuallyDrop, MaybeUninit};
70use core::ops::{Deref, DerefMut};
71use core::ptr::NonNull;
72use core::{fmt, ptr};
73use core::slice;
74
75mod raw;
76use raw::RawVec;
77
/// Storage for `TinyVec`: either an inline array of (possibly
/// uninitialized) elements, or a heap allocation managed by `RawVec`.
/// Which variant is live is tracked externally by the tag bit in
/// `Length` — the union itself carries no discriminant.
union TinyVecInner<T, const N: usize> {
    stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    raw: RawVec<T>,
}
82
impl<T, const N: usize> TinyVecInner<T, N> {
    /// Pointer to the first slot of the inline buffer.
    ///
    /// # Safety
    /// `stack` must be the live union variant.
    const unsafe fn as_ptr_stack(&self) -> *const T {
        // `MaybeUninit<T>` is guaranteed to have the same layout as `T`,
        // so a pointer to the array doubles as a pointer to `T`.
        unsafe { &self.stack as *const _ as *const T }
    }

    /// Mutable pointer to the first slot of the inline buffer.
    ///
    /// # Safety
    /// `stack` must be the live union variant.
    const unsafe fn as_ptr_stack_mut(&mut self) -> *mut T {
        unsafe { &mut self.stack as *mut _ as *mut T }
    }

    /// Pointer to the start of the heap allocation.
    ///
    /// # Safety
    /// `raw` must be the live union variant.
    const unsafe fn as_ptr_heap(&self) -> *const T {
        unsafe { self.raw.ptr.as_ptr() }
    }

    /// Mutable pointer to the start of the heap allocation.
    ///
    /// # Safety
    /// `raw` must be the live union variant.
    const unsafe fn as_ptr_heap_mut(&mut self) -> *mut T {
        unsafe { self.raw.ptr.as_ptr() }
    }
}
100
/// Tagged length: the low bit records where the elements live
/// (0 = inline stack buffer, 1 = heap allocation) and the remaining
/// bits hold the element count, i.e. `value == len << 1 | tag`.
#[repr(transparent)]
struct Length(usize);

impl Length {
    /// Length `len` tagged as stack-resident (low bit clear).
    #[inline]
    const fn new_stack(len: usize) -> Self {
        Self(len << 1)
    }

    /// Length `len` tagged as heap-resident (low bit set).
    const fn new_heap(len: usize) -> Self {
        Self(len << 1 | 0b1)
    }

    /// True when the elements live in the inline stack buffer.
    #[inline]
    const fn is_stack(&self) -> bool {
        (self.0 & 0b1) == 0
    }

    /// Mark the storage as heap-allocated, preserving the length bits.
    #[inline]
    const fn set_heap(&mut self) {
        self.0 |= 0b1;
    }

    /// Mark the storage as stack-allocated, preserving the length bits.
    #[inline]
    const fn set_stack(&mut self) {
        // BUG FIX: this previously did `self.0 &= 0b0`, which zeroed
        // the whole word — wiping the stored length along with the
        // tag and silently emptying the vector. Only the tag bit may
        // be cleared.
        self.0 &= !0b1;
    }

    /// Replace the length, preserving the storage tag bit.
    const fn set(&mut self, n: usize) {
        self.0 &= 0b1;
        self.0 |= n << 1;
    }

    /// Current element count (tag bit shifted away).
    #[inline]
    const fn get(&self) -> usize {
        self.0 >> 1
    }

    /// Increase the length by `n`; `n` is pre-shifted past the tag
    /// bit, so the tag is untouched.
    #[inline]
    const fn add(&mut self, n: usize) {
        self.0 += n << 1;
    }

    /// Decrease the length by `n`, leaving the tag bit untouched.
    #[inline]
    const fn sub(&mut self, n: usize) {
        self.0 -= n << 1;
    }
}
149
150pub const fn n_elements_for_stack<T>() -> usize {
166 mem::size_of::<RawVec<T>>() / mem::size_of::<T>()
167}
168
/// A vector that stores up to `N` elements inline on the stack and
/// spills to a heap allocation only when it grows beyond that.
///
/// With the `nightly-const-generics` feature enabled, `N` defaults to
/// however many elements fit in the heap header
/// (see [`n_elements_for_stack`]).
pub struct TinyVec<T,
    #[cfg(not(feature = "nightly-const-generics"))]
    const N: usize,

    #[cfg(feature = "nightly-const-generics")]
    const N: usize = { n_elements_for_stack::<T>() },
> {
    // The `stack` variant is live iff `len.is_stack()`.
    inner: TinyVecInner<T, N>,
    // Tagged length: element count plus the stack/heap tag bit.
    len: Length,
}
180
181impl<T, const N: usize> TinyVec<T, N> {
182
183 unsafe fn switch_to_heap(&mut self, n: usize) {
184 let mut vec = RawVec::new();
185 vec.expand_if_needed(0, self.len.get() + n);
186 unsafe {
187 let src = self.inner.as_ptr_stack();
188 let dst = vec.ptr.as_ptr();
189 ptr::copy(
190 src,
191 dst,
192 self.len.get()
193 );
194 self.inner.raw = vec;
195 }
196 self.len.set_heap();
197 }
198
199 unsafe fn switch_to_stack(&mut self) {
200 let mut rv = unsafe { self.inner.raw };
201
202 let stack = [ const { MaybeUninit::uninit() }; N ];
203
204 unsafe {
205 let src = rv.ptr.as_ptr();
206 let dst = stack.as_ptr() as *mut T;
207 ptr::copy(
208 src,
209 dst,
210 self.len.get()
211 );
212
213 rv.destroy();
214 }
215
216 self.inner.stack = ManuallyDrop::new(stack);
217 self.len.set_stack();
218 }
219}
220
221impl<T, const N: usize> TinyVec<T, N> {
222
223 pub const fn new() -> Self {
225 let stack = [ const { MaybeUninit::uninit() }; N ];
226 Self {
227 inner: TinyVecInner { stack: ManuallyDrop::new(stack) },
228 len: Length(0),
229 }
230 }
231
232 pub fn with_capacity(cap: usize) -> Self {
234 let mut len = Length(0);
235 let inner = if cap <= N {
236 let s = [ const { MaybeUninit::uninit() }; N ];
237 TinyVecInner { stack: ManuallyDrop::new(s) }
238 } else {
239 len.set_heap();
240 TinyVecInner { raw: RawVec::with_capacity(cap) }
241 };
242
243 Self { inner, len }
244 }
245
    /// Builds a `TinyVec` from an owned array, taking ownership of
    /// every element; the result is full (`len == N`) and lives inline.
    pub const fn from_array(arr: [T; N]) -> Self {
        // Prevent the elements from being dropped here: ownership
        // moves into the inline buffer below.
        let md = ManuallyDrop::new(arr);
        // SAFETY of the transmute: `[T; N]` and `[MaybeUninit<T>; N]`
        // have identical layout. `transmute_copy` is used because plain
        // `transmute` cannot handle generically-sized arrays.
        let arr: [MaybeUninit<T>; N] = unsafe { mem::transmute_copy(&md) };
        let arr = ManuallyDrop::new(arr);
        Self {
            inner: TinyVecInner { stack: arr },
            len: Length::new_stack(N)
        }
    }
258
    /// Number of elements currently stored.
    #[inline]
    pub const fn len(&self) -> usize { self.len.get() }
262
    /// True when the vector holds no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool { self.len.get() == 0 }
266
267 pub const fn capacity(&self) -> usize {
269 if self.len.is_stack() {
270 N
271 } else {
272 unsafe { self.inner.raw.cap }
273 }
274 }
275
    /// True while the elements are stored in the inline buffer.
    #[inline]
    pub const fn lives_on_stack(&self) -> bool { self.len.is_stack() }
296
297 pub const fn as_ptr(&self) -> *const T {
299 unsafe {
300 if self.len.is_stack() {
301 self.inner.as_ptr_stack()
302 } else {
303 self.inner.as_ptr_heap()
304 }
305 }
306 }
307
308 pub const fn as_mut_ptr(&mut self) -> *mut T {
310 unsafe {
311 if self.len.is_stack() {
312 self.inner.as_ptr_stack_mut()
313 } else {
314 self.inner.as_ptr_heap_mut()
315 }
316 }
317 }
318
319 pub const fn as_non_null(&mut self) -> NonNull<T> {
321 unsafe {
322 NonNull::new_unchecked(self.as_mut_ptr())
323 }
324 }
325
326 pub const fn as_slice(&self) -> &[T] {
328 unsafe {
329 slice::from_raw_parts(self.as_ptr(), self.len.get())
330 }
331 }
332
333 pub const fn as_mut_slice(&mut self) -> &mut [T] {
335 unsafe {
336 slice::from_raw_parts_mut(self.as_mut_ptr(), self.len.get())
337 }
338 }
339
340 pub fn reserve(&mut self, n: usize) {
342 if self.len.is_stack() {
343 if self.len.get() + n > N {
344 unsafe {
345 self.switch_to_heap(n);
346 }
347 }
348 } else {
349 unsafe {
350 self.inner.raw.expand_if_needed(self.len.get(), n);
351 }
352 }
353 }
354
355 pub fn push(&mut self, elem: T) {
357 self.reserve(1);
358 unsafe { self.push_unchecked(elem); }
359 }
360
    /// Appends `elem` without checking capacity.
    ///
    /// # Safety
    /// The caller must guarantee `len() < capacity()`; otherwise this
    /// writes past the end of the buffer.
    pub unsafe fn push_unchecked(&mut self, elem: T) {
        unsafe {
            let dst = self.as_mut_ptr().add(self.len.get());
            dst.write(elem);
        }
        // Bump the count only after the slot is initialized.
        self.len.add(1);
    }
375
376 pub fn push_within_capacity(&mut self, val: T) -> Result<(),T> {
380 if self.len.get() < self.capacity() {
381 unsafe { self.push_unchecked(val); }
382 Ok(())
383 } else {
384 Err(val)
385 }
386 }
387 pub fn pop(&mut self) -> Option<T> {
389 if self.len.get() == 0 {
390 None
391 } else {
392 self.len.sub(1);
393 let val =
394 unsafe {
395 self.as_ptr().add(self.len.get()).read()
396 };
397 Some(val)
398 }
399 }
400
401 #[inline]
403 pub fn insert(&mut self, index: usize, elem: T) -> Result<(),T> {
404 if index > self.len.get() { return Err(elem) }
405 unsafe { self.insert_unckecked(index, elem); }
406 Ok(())
407 }
408
409 pub unsafe fn insert_unckecked(&mut self, index: usize, elem: T) {
413 self.reserve(1);
414
415 unsafe {
416 ptr::copy(
417 self.as_ptr().add(index),
418 self.as_mut_ptr().add(index + 1),
419 self.len.get() - index,
420 );
421 self.as_mut_ptr().add(index).write(elem);
422 }
423 self.len.add(1);
424 }
425
426 pub fn resize(&mut self, cap: usize, elem: T)
428 where
429 T: Clone
430 {
431 if cap < self.len() {
432 self.truncate(cap);
433 } else {
434 let n = self.len() - cap;
435 self.reserve(n);
436
437 unsafe {
438 let mut ptr = self.as_mut_ptr().add(self.len());
439 let len = &mut self.len;
440
441 for _ in 1..n {
442 ptr::write(ptr, elem.clone());
443 ptr = ptr.add(1);
444 len.add(1);
445 }
446
447 if n > 0 {
448 ptr::write(ptr, elem);
449 len.add(1);
450 }
451
452 }
453 }
454 }
455
456 pub fn resize_with<F>(&mut self, cap: usize, mut f: F)
475 where
476 F: FnMut() -> T
477 {
478 if cap < self.len() {
479 self.truncate(cap);
480 } else {
481 let n = cap - self.len();
482 self.reserve(n);
483
484 unsafe {
485 let mut ptr = self.as_mut_ptr().add(self.len());
486 let len = &mut self.len;
487
488 for _ in 0..n {
489 ptr::write(ptr, f());
490 ptr = ptr.add(1);
491 len.add(1);
492 }
493 }
494 }
495 }
496
    /// Resizes to `cap` elements, zero-filling any newly added slots.
    ///
    /// # Safety
    /// The all-zero byte pattern must be a valid value of `T`.
    pub unsafe fn resize_zeroed(&mut self, cap: usize) {
        if cap < self.len() {
            self.truncate(cap);
        } else {
            let n = cap - self.len();
            self.reserve(n);
            unsafe {
                let ptr = self.as_mut_ptr().add(self.len());
                // Zero `n` elements' worth of bytes past the current end.
                ptr.write_bytes(0, n);
            }
            self.len.add(n);
        }
    }
533
    /// Like `reserve`, but requests exactly the needed capacity from
    /// the heap allocation instead of amortized growth.
    pub fn reserve_exact(&mut self, n: usize) {
        if self.len.is_stack() {
            if self.len.get() + n > N {
                // SAFETY: stack tag clear => `stack` is the live variant.
                unsafe { self.switch_to_heap(n); }
            }
        } else {
            // SAFETY: heap tag set => `raw` is the live variant.
            let vec = unsafe { &mut self.inner.raw };
            let len = self.len.get();
            // Never request less than the current capacity.
            let new_cap = vec.cap.max(len + n);
            vec.expand_if_needed_exact(len, new_cap);
        }
    }
547
    /// Removes and returns the element at `index` without bounds
    /// checking, shifting the tail left by one.
    ///
    /// # Safety
    /// `index` must be `< len()`.
    pub unsafe fn remove_unchecked(&mut self, index: usize) -> T {
        debug_assert!(index < self.len.get(), "Index is >= than {}, this will trigger UB", self.len.get());

        unsafe {
            // Decrement first so `len.get() - index` below equals the
            // number of elements after `index` (the tail to shift).
            self.len.sub(1);
            let result = self.as_mut_ptr().add(index).read();
            // The source and destination overlap: memmove-style copy.
            ptr::copy(
                self.as_ptr().add(index + 1),
                self.as_mut_ptr().add(index),
                self.len.get() - index,
            );
            result
        }
    }
564
565 #[inline]
566 pub fn remove(&mut self, index: usize) -> Option<T> {
567 if index >= self.len.get() { return None }
568 Some(unsafe { self.remove_unchecked(index) })
571 }
572
573 pub fn swap(&mut self, a: usize, b: usize) {
578 self.swap_checked(a, b).unwrap_or_else(|| {
579 panic!("Index out of bounds")
580 });
581 }
582
583 pub fn swap_checked(&mut self, a: usize, b: usize) -> Option<()> {
587 if a >= self.len.get() {
588 return None
589 };
590 if b >= self.len.get() {
591 return None
592 };
593 unsafe { self.swap_unchecked(a, b); }
594 Some(())
595 }
596
597 pub unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
603 unsafe {
604 let ap = self.as_mut_ptr().add(a);
605 let bp = self.as_mut_ptr().add(b);
606 ptr::swap(ap, bp);
607 }
608 }
609
610 pub fn swap_remove(&mut self, index: usize) -> Option<T> {
612 if index >= self.len.get() {
613 None
614 } else if index == self.len.get() - 1 {
615 self.pop()
616 } else {
617 unsafe { self.swap_unchecked(index, self.len.get() - 1) }
618 self.pop()
619 }
620 }
621
    /// Reduces memory use: moves the elements back into the inline
    /// buffer when they fit, otherwise shrinks the heap allocation to
    /// the current length.
    #[inline]
    pub fn shrink_to_fit(&mut self) {
        // Nothing to shrink while inline.
        if self.len.is_stack() { return }

        if self.len.get() <= N {
            // SAFETY: heap variant is live and the elements fit inline.
            unsafe { self.switch_to_stack(); }
        } else {
            // SAFETY: heap variant is live.
            unsafe { self.inner.raw.shrink_to_fit(self.len.get()); };
        }
    }
633
    /// Overwrites the stored length without touching any elements.
    ///
    /// # Safety
    /// `len` must not exceed the capacity and the first `len` elements
    /// must be initialized; elements beyond the old length are neither
    /// dropped nor constructed by this call.
    #[inline]
    pub const unsafe fn set_length(&mut self, len: usize) {
        // `Length::set` preserves the stack/heap tag bit.
        self.len.set(len);
    }
647
648 pub fn truncate(&mut self, len: usize) {
651 if len < self.len.get() {
652 for e in &mut self[len..] {
653 unsafe { ptr::drop_in_place(e) }
654 }
655 unsafe { self.set_length(len); }
656 }
657 }
658
659 pub fn push_slice(&mut self, s: &[T])
661 where
662 T: Copy
663 {
664 let len = s.len();
665 let src = s.as_ptr();
666
667 self.reserve(len);
668
669 unsafe {
670 ptr::copy(
671 src,
672 self.as_mut_ptr().add(self.len.get()),
673 len
674 )
675 }
676
677 self.len.add(len);
678 }
679
680 pub fn extend_from<I>(&mut self, it: I)
682 where
683 I: IntoIterator<Item = T>,
684 {
685 let it = it.into_iter();
686
687 let (min, max) = it.size_hint();
688 let reserve = match max {
689 Some(max) => max,
690 None => min,
691 };
692
693 if reserve > 0 {
694 self.reserve(reserve);
695 }
696
697 for elem in it {
698 unsafe { self.push_unchecked(elem); }
699 }
700 }
701}
702
703impl<T, const N: usize> Default for TinyVec<T, N> {
704 fn default() -> Self {
705 Self::new()
706 }
707}
708
709impl<T: fmt::Debug, const N: usize> fmt::Debug for TinyVec<T, N> {
710 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
711 fmt::Debug::fmt(&self[..], f)
712 }
713}
714
715impl<T: PartialEq, const N: usize> PartialEq for TinyVec<T, N> {
716 fn eq(&self, other: &Self) -> bool {
717 self.deref() == other.deref()
718 }
719}
720
/// Lets a `TinyVec` be used wherever a `&[T]` slice is expected.
impl<T, const N: usize> Deref for TinyVec<T, N> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
728
729
/// Lets a `TinyVec` be used wherever a `&mut [T]` slice is expected.
impl<T, const N: usize> DerefMut for TinyVec<T, N> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}
735
impl<T, const N: usize> Drop for TinyVec<T, N> {
    fn drop(&mut self) {
        // Drop the elements first (skipped entirely when `T` has no
        // destructor)...
        if mem::needs_drop::<T>() {
            for e in self.deref_mut() {
                unsafe { ptr::drop_in_place(e) };
            }
        }
        // ...then release the heap allocation, if any.
        if !self.len.is_stack() {
            // SAFETY: heap tag set => `raw` is the live variant.
            unsafe { self.inner.raw.destroy(); }
        }
    }
}
748
impl<T, const N: usize> From<Vec<T>> for TinyVec<T, N> {
    /// Takes over a `Vec`'s buffer without copying the elements; the
    /// result always lives on the heap, even when it would fit inline.
    fn from(value: Vec<T>) -> Self {
        // ManuallyDrop: ownership of the allocation moves into the
        // RawVec, so the Vec itself must not free it.
        let mut md = ManuallyDrop::new(value);
        // SAFETY: `Vec`'s data pointer is never null (it is dangling
        // but non-null even for capacity 0).
        let ptr = unsafe { NonNull::new_unchecked( md.as_mut_ptr() ) };
        // NOTE(review): for a capacity-0 Vec this stores a dangling
        // pointer with `cap == 0` — assumes `RawVec::destroy` skips
        // deallocation in that case; verify against `raw.rs`.
        let inner = TinyVecInner {
            raw: RawVec {
                cap: md.capacity(),
                ptr,
            }
        };

        Self {
            inner,
            len: Length::new_heap(md.len()),
        }
    }
}
766
767impl<T: Copy, const N: usize> From<&[T]> for TinyVec<T, N> {
768 fn from(value: &[T]) -> Self {
769 let mut v = Self::with_capacity(value.len());
770 v.push_slice(value);
771 v
772 }
773}
774
775impl<T: Copy, const N: usize, const M: usize> From<&[T; M]> for TinyVec<T, N> {
776 fn from(value: &[T; M]) -> Self {
777 Self::from(value as &[T])
778 }
779}
780
/// Moving an owned `[T; N]` in produces a completely full, inline vector.
impl<T, const N: usize> From<[T; N]> for TinyVec<T, N> {
    fn from(value: [T; N]) -> Self {
        Self::from_array(value)
    }
}
786
787impl<T, const N: usize> FromIterator<T> for TinyVec<T, N> {
788 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
789 let iter = iter.into_iter();
790 let cap = match iter.size_hint() {
791 (_, Some(max)) => max,
792 (n, None) => n,
793 };
794 let mut vec = Self::with_capacity(cap);
795 for elem in iter {
796 unsafe { vec.push_unchecked(elem) };
797 }
798 vec
799 }
800}
801
802#[cfg(test)]
803mod test;