1#![allow(incomplete_features)]
61#![cfg_attr(feature = "nightly-const-generics", feature(generic_const_exprs))]
62
63#![no_std]
64
65extern crate alloc;
66use alloc::vec::Vec;
67
68use core::mem::{self, ManuallyDrop, MaybeUninit};
69use core::ops::{Deref, DerefMut};
70use core::ptr::NonNull;
71use core::{fmt, ptr};
72use core::slice;
73
74mod raw;
75use raw::RawVec;
76
/// Backing storage for [`TinyVec`]: either `N` inline, possibly
/// uninitialized slots (`stack`) or a heap allocation (`raw`).
///
/// The union itself carries no discriminant; which field is active is
/// tracked externally by the tag bit of `Length`.
union TinyVecInner<T, const N: usize> {
    // Inline storage. `MaybeUninit` because only the first `len` slots
    // hold values; `ManuallyDrop` because unions may not drop contents.
    stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    // Heap storage (pointer + capacity), defined in `raw.rs`.
    raw: RawVec<T>,
}
81
impl<T, const N: usize> TinyVecInner<T, N> {

    /// Pointer to the first inline slot.
    ///
    /// # Safety
    /// `stack` must be the active union field.
    #[inline(always)]
    const unsafe fn as_ptr_stack(&self) -> *const T {
        // `ManuallyDrop<[MaybeUninit<T>; N]>` is layout-transparent over
        // `[T; N]`, so a pointer to the field is a pointer to element 0.
        unsafe { &self.stack as *const _ as *const T }
    }

    /// Mutable pointer to the first inline slot.
    ///
    /// # Safety
    /// `stack` must be the active union field.
    #[inline(always)]
    const unsafe fn as_ptr_stack_mut(&mut self) -> *mut T {
        unsafe { &mut self.stack as *mut _ as *mut T }
    }

    /// Pointer to the first heap element.
    ///
    /// # Safety
    /// `raw` must be the active union field.
    #[inline(always)]
    const unsafe fn as_ptr_heap(&self) -> *const T {
        unsafe { self.raw.ptr.as_ptr() }
    }

    /// Mutable pointer to the first heap element.
    ///
    /// # Safety
    /// `raw` must be the active union field.
    #[inline(always)]
    const unsafe fn as_ptr_heap_mut(&mut self) -> *mut T {
        unsafe { self.raw.ptr.as_ptr() }
    }
}
104
/// Length word for [`TinyVec`]: bit 0 is the storage tag
/// (0 = inline/stack, 1 = heap) and the remaining bits hold the element
/// count, i.e. the stored value is `len << 1 | tag`.
#[repr(transparent)]
struct Length(usize);

impl Length {
    /// Length `len`, tagged as inline storage.
    #[inline(always)]
    const fn new_stack(len: usize) -> Self {
        Self(len << 1)
    }

    /// Length `len`, tagged as heap storage.
    #[inline(always)]
    const fn new_heap(len: usize) -> Self {
        Self(len << 1 | 0b1)
    }

    /// `true` when the tag marks inline storage.
    #[inline(always)]
    const fn is_stack(&self) -> bool {
        (self.0 & 0b1) == 0
    }

    /// Flips the tag to "heap", keeping the length intact.
    #[inline(always)]
    const fn set_heap(&mut self) {
        self.0 |= 0b1;
    }

    /// Flips the tag to "stack", keeping the length intact.
    #[inline(always)]
    const fn set_stack(&mut self) {
        // BUG FIX: this was `self.0 &= 0b0`, which zeroed the whole
        // word — wiping the element count as well as the tag, so every
        // heap-to-stack transition silently reset the length to 0.
        // Clear only the tag bit.
        self.0 &= !0b1;
    }

    /// Replaces the length, keeping the tag bit.
    #[inline(always)]
    const fn set(&mut self, n: usize) {
        self.0 &= 0b1;
        self.0 |= n << 1;
    }

    /// Current length (tag stripped).
    #[inline(always)]
    const fn get(&self) -> usize {
        self.0 >> 1
    }

    /// Increases the length by `n`; the tag bit is unaffected because
    /// `n << 1` has a zero low bit.
    #[inline(always)]
    const fn add(&mut self, n: usize) {
        self.0 += n << 1;
    }

    /// Decreases the length by `n`; the tag bit is unaffected.
    #[inline(always)]
    const fn sub(&mut self, n: usize) {
        self.0 -= n << 1;
    }
}
155
156pub const fn n_elements_for_stack<T>() -> usize {
172 mem::size_of::<RawVec<T>>() / mem::size_of::<T>()
173}
174
/// A vector that stores up to `N` elements inline and spills to the
/// heap beyond that. The low bit of `len` (see `Length`) records which
/// storage inside the `inner` union is active.
pub struct TinyVec<T,
    #[cfg(not(feature = "nightly-const-generics"))]
    const N: usize,

    // With the nightly feature, `N` defaults to however many `T`s fit
    // in the space the heap header (`RawVec<T>`) occupies anyway.
    #[cfg(feature = "nightly-const-generics")]
    const N: usize = { n_elements_for_stack::<T>() },
> {
    inner: TinyVecInner<T, N>,
    len: Length,
}
186
187impl<T, const N: usize> TinyVec<T, N> {
188
189 unsafe fn switch_to_heap(&mut self, n: usize) {
190 let mut vec = RawVec::new();
191 vec.expand_if_needed(0, self.len.get() + n);
192 unsafe {
193 let src = self.inner.as_ptr_stack();
194 let dst = vec.ptr.as_ptr();
195 ptr::copy(
196 src,
197 dst,
198 self.len.get()
199 );
200 self.inner.raw = vec;
201 }
202 self.len.set_heap();
203 }
204
205 unsafe fn switch_to_stack(&mut self) {
206 let mut rv = unsafe { self.inner.raw };
207
208 let stack = [ const { MaybeUninit::uninit() }; N ];
209
210 unsafe {
211 let src = rv.ptr.as_ptr();
212 let dst = stack.as_ptr() as *mut T;
213 ptr::copy(
214 src,
215 dst,
216 self.len.get()
217 );
218
219 rv.destroy();
220 }
221
222 self.inner.stack = ManuallyDrop::new(stack);
223 self.len.set_stack();
224 }
225}
226
227impl<T, const N: usize> TinyVec<T, N> {
228
229 pub const fn new() -> Self {
231 let stack = [ const { MaybeUninit::uninit() }; N ];
232 Self {
233 inner: TinyVecInner { stack: ManuallyDrop::new(stack) },
234 len: Length(0),
235 }
236 }
237
238 pub fn with_capacity(cap: usize) -> Self {
240 let mut len = Length(0);
241 let inner = if cap <= N {
242 let s = [ const { MaybeUninit::uninit() }; N ];
243 TinyVecInner { stack: ManuallyDrop::new(s) }
244 } else {
245 len.set_heap();
246 TinyVecInner { raw: RawVec::with_capacity(cap) }
247 };
248
249 Self { inner, len }
250 }
251
252 pub const fn from_array(arr: [T; N]) -> Self {
254 let md = ManuallyDrop::new(arr);
255 let arr: [MaybeUninit<T>; N] = unsafe { mem::transmute_copy(&md) };
258 let arr = ManuallyDrop::new(arr);
259 Self {
260 inner: TinyVecInner { stack: arr },
261 len: Length::new_stack(N)
262 }
263 }
264
265 #[inline]
267 pub const fn len(&self) -> usize { self.len.get() }
268
269 #[inline]
271 pub const fn is_empty(&self) -> bool { self.len.get() == 0 }
272
273 #[inline]
275 pub const fn capacity(&self) -> usize {
276 if self.len.is_stack() {
277 N
278 } else {
279 unsafe { self.inner.raw.cap }
280 }
281 }
282
283 #[inline]
302 pub const fn lives_on_stack(&self) -> bool { self.len.is_stack() }
303
304 #[inline]
306 pub const fn as_ptr(&self) -> *const T {
307 unsafe {
308 if self.len.is_stack() {
309 self.inner.as_ptr_stack()
310 } else {
311 self.inner.as_ptr_heap()
312 }
313 }
314 }
315
316 #[inline]
318 pub const fn as_mut_ptr(&mut self) -> *mut T {
319 unsafe {
320 if self.len.is_stack() {
321 self.inner.as_ptr_stack_mut()
322 } else {
323 self.inner.as_ptr_heap_mut()
324 }
325 }
326 }
327
328 #[inline]
330 pub const fn as_non_null(&mut self) -> NonNull<T> {
331 unsafe {
332 NonNull::new_unchecked(self.as_mut_ptr())
333 }
334 }
335
336 #[inline]
338 pub const fn as_slice(&self) -> &[T] {
339 unsafe {
340 slice::from_raw_parts(self.as_ptr(), self.len.get())
341 }
342 }
343
344 #[inline]
346 pub const fn as_mut_slice(&mut self) -> &mut [T] {
347 unsafe {
348 slice::from_raw_parts_mut(self.as_mut_ptr(), self.len.get())
349 }
350 }
351
352 pub fn reserve(&mut self, n: usize) {
354 if self.len.is_stack() {
355 if self.len.get() + n > N {
356 unsafe {
357 self.switch_to_heap(n);
358 }
359 }
360 } else {
361 unsafe {
362 self.inner.raw.expand_if_needed(self.len.get(), n);
363 }
364 }
365 }
366
367 #[inline]
369 pub fn push(&mut self, elem: T) {
370 self.reserve(1);
371 unsafe { self.push_unchecked(elem); }
372 }
373
374 pub unsafe fn push_unchecked(&mut self, elem: T) {
382 unsafe {
383 let dst = self.as_mut_ptr().add(self.len.get());
384 dst.write(elem);
385 }
386 self.len.add(1);
387 }
388
389 pub fn push_within_capacity(&mut self, val: T) -> Result<(),T> {
393 if self.len.get() < self.capacity() {
394 unsafe { self.push_unchecked(val); }
395 Ok(())
396 } else {
397 Err(val)
398 }
399 }
400 pub fn pop(&mut self) -> Option<T> {
402 if self.len.get() == 0 {
403 None
404 } else {
405 self.len.sub(1);
406 let val =
407 unsafe {
408 self.as_ptr().add(self.len.get()).read()
409 };
410 Some(val)
411 }
412 }
413
414 pub fn insert(&mut self, index: usize, elem: T) -> Result<(),T> {
416 if index > self.len.get() { return Err(elem) }
417 unsafe { self.insert_unckecked(index, elem); }
418 Ok(())
419 }
420
421 pub unsafe fn insert_unckecked(&mut self, index: usize, elem: T) {
425 self.reserve(1);
426
427 unsafe {
428 ptr::copy(
429 self.as_ptr().add(index),
430 self.as_mut_ptr().add(index + 1),
431 self.len.get() - index,
432 );
433 self.as_mut_ptr().add(index).write(elem);
434 }
435 self.len.add(1);
436 }
437
438 pub fn resize(&mut self, cap: usize, elem: T)
440 where
441 T: Clone
442 {
443 if cap < self.len() {
444 self.truncate(cap);
445 } else {
446 let n = self.len() - cap;
447 self.reserve(n);
448
449 unsafe {
450 let mut ptr = self.as_mut_ptr().add(self.len());
451 let len = &mut self.len;
452
453 for _ in 1..n {
454 ptr::write(ptr, elem.clone());
455 ptr = ptr.add(1);
456 len.add(1);
457 }
458
459 if n > 0 {
460 ptr::write(ptr, elem);
461 len.add(1);
462 }
463
464 }
465 }
466 }
467
468 pub fn resize_with<F>(&mut self, cap: usize, mut f: F)
487 where
488 F: FnMut() -> T
489 {
490 if cap < self.len() {
491 self.truncate(cap);
492 } else {
493 let n = cap - self.len();
494 self.reserve(n);
495
496 unsafe {
497 let mut ptr = self.as_mut_ptr().add(self.len());
498 let len = &mut self.len;
499
500 for _ in 0..n {
501 ptr::write(ptr, f());
502 ptr = ptr.add(1);
503 len.add(1);
504 }
505 }
506 }
507 }
508
509 pub unsafe fn resize_zeroed(&mut self, cap: usize) {
533 if cap < self.len() {
534 self.truncate(cap);
535 } else {
536 let n = cap - self.len();
537 self.reserve(n);
538 unsafe {
539 let ptr = self.as_mut_ptr().add(self.len());
540 ptr.write_bytes(0, n);
541 }
542 self.len.add(n);
543 }
544 }
545
546 pub fn reserve_exact(&mut self, n: usize) {
548 if self.len.is_stack() {
549 if self.len.get() + n > N {
550 unsafe { self.switch_to_heap(n); }
551 }
552 } else {
553 let vec = unsafe { &mut self.inner.raw };
554 let len = self.len.get();
555 let new_cap = vec.cap.max(len + n);
556 vec.expand_if_needed_exact(len, new_cap);
557 }
558 }
559
560 pub unsafe fn remove_unchecked(&mut self, index: usize) -> T {
563 debug_assert!(index < self.len.get(), "Index is >= than {}, this will trigger UB", self.len.get());
564
565 unsafe {
566 self.len.sub(1);
567 let result = self.as_mut_ptr().add(index).read();
568 ptr::copy(
569 self.as_ptr().add(index + 1),
570 self.as_mut_ptr().add(index),
571 self.len.get() - index,
572 );
573 result
574 }
575 }
576
577 #[inline]
578 pub fn remove(&mut self, index: usize) -> Option<T> {
579 if index >= self.len.get() { return None }
580 Some(unsafe { self.remove_unchecked(index) })
582 }
583
584 #[inline]
589 pub fn swap(&mut self, a: usize, b: usize) {
590 self.swap_checked(a, b).unwrap_or_else(|| {
591 panic!("Index out of bounds")
592 });
593 }
594
595 pub fn swap_checked(&mut self, a: usize, b: usize) -> Option<()> {
599 if a >= self.len.get() {
600 return None
601 };
602 if b >= self.len.get() {
603 return None
604 };
605 unsafe { self.swap_unchecked(a, b); }
606 Some(())
607 }
608
609 pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
615 unsafe {
616 let ap = self.as_mut_ptr().add(a);
617 let bp = self.as_mut_ptr().add(b);
618 ptr::swap(ap, bp);
619 }
620 }
621
622 pub fn swap_remove(&mut self, index: usize) -> Option<T> {
624 if index >= self.len.get() {
625 None
626 } else if index == self.len.get() - 1 {
627 self.pop()
628 } else {
629 unsafe { self.swap_unchecked(index, self.len.get() - 1) }
630 self.pop()
631 }
632 }
633
634 #[inline]
635 pub fn shrink_to_fit(&mut self) {
637 if self.len.is_stack() { return }
638
639 if self.len.get() <= N {
640 unsafe { self.switch_to_stack(); }
641 } else {
642 unsafe { self.inner.raw.shrink_to_fit(self.len.get()); };
643 }
644 }
645
646 #[inline]
656 pub const unsafe fn set_length(&mut self, len: usize) {
657 self.len.set(len);
658 }
659
660 pub fn truncate(&mut self, len: usize) {
663 if len < self.len.get() {
664 for e in &mut self[len..] {
665 unsafe { ptr::drop_in_place(e) }
666 }
667 unsafe { self.set_length(len); }
668 }
669 }
670
671 pub fn push_slice(&mut self, s: &[T])
673 where
674 T: Copy
675 {
676 let len = s.len();
677 let src = s.as_ptr();
678
679 self.reserve(len);
680
681 unsafe {
682 ptr::copy(
683 src,
684 self.as_mut_ptr().add(self.len.get()),
685 len
686 )
687 }
688
689 self.len.add(len);
690 }
691
692 pub fn extend_from<I>(&mut self, it: I)
694 where
695 I: IntoIterator<Item = T>,
696 {
697 let it = it.into_iter();
698
699 let (min, max) = it.size_hint();
700 let reserve = match max {
701 Some(max) => max,
702 None => min,
703 };
704
705 if reserve > 0 {
706 self.reserve(reserve);
707 }
708
709 for elem in it {
710 unsafe { self.push_unchecked(elem); }
711 }
712 }
713}
714
impl<T, const N: usize> Default for TinyVec<T, N> {
    /// Equivalent to [`TinyVec::new`]: an empty, inline-stored vector.
    fn default() -> Self {
        Self::new()
    }
}
720
impl<T: fmt::Debug, const N: usize> fmt::Debug for TinyVec<T, N> {
    /// Formats like a slice (e.g. `[1, 2, 3]`), hiding storage details.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self[..], f)
    }
}
726
727impl<T: PartialEq, const N: usize> PartialEq for TinyVec<T, N> {
728 fn eq(&self, other: &Self) -> bool {
729 self.deref() == other.deref()
730 }
731}
732
impl<T, const N: usize> Deref for TinyVec<T, N> {
    type Target = [T];

    /// Exposes every immutable slice method directly on the vector.
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
740
741
impl<T, const N: usize> DerefMut for TinyVec<T, N> {
    /// Exposes every mutable slice method directly on the vector.
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}
747
748impl<T, const N: usize> Drop for TinyVec<T, N> {
749 fn drop(&mut self) {
750 if mem::needs_drop::<T>() {
751 for e in self.deref_mut() {
752 unsafe { ptr::drop_in_place(e) };
753 }
754 }
755 if !self.len.is_stack() {
756 unsafe { self.inner.raw.destroy(); }
757 }
758 }
759}
760
impl<T, const N: usize> From<Vec<T>> for TinyVec<T, N> {
    /// Takes over a `Vec`'s heap buffer without copying the elements.
    ///
    /// NOTE(review): this assumes `RawVec::destroy` deallocates with the
    /// same layout `Vec` allocated with, and that it tolerates the
    /// dangling pointer a `Vec` holds when `capacity() == 0` — confirm
    /// both against `raw.rs`.
    fn from(value: Vec<T>) -> Self {
        // `ManuallyDrop` keeps the `Vec` from freeing the buffer we steal.
        let mut md = ManuallyDrop::new(value);
        // SAFETY: `Vec`'s data pointer is never null, even when empty.
        let ptr = unsafe { NonNull::new_unchecked( md.as_mut_ptr() ) };
        let inner = TinyVecInner {
            raw: RawVec {
                cap: md.capacity(),
                ptr,
            }
        };

        Self {
            inner,
            // Always tagged "heap": we adopted a heap buffer regardless
            // of whether the contents would have fit inline.
            len: Length::new_heap(md.len()),
        }
    }
}
778
779impl<T: Copy, const N: usize> From<&[T]> for TinyVec<T, N> {
780 fn from(value: &[T]) -> Self {
781 let mut v = Self::with_capacity(value.len());
782 v.push_slice(value);
783 v
784 }
785}
786
787impl<T: Copy, const N: usize, const M: usize> From<&[T; M]> for TinyVec<T, N> {
788 fn from(value: &[T; M]) -> Self {
789 Self::from(value as &[T])
790 }
791}
792
impl<T, const N: usize> From<[T; N]> for TinyVec<T, N> {
    /// Moves the array into inline storage; see [`TinyVec::from_array`].
    fn from(value: [T; N]) -> Self {
        Self::from_array(value)
    }
}
798
799impl<T, const N: usize> FromIterator<T> for TinyVec<T, N> {
800 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
801 let iter = iter.into_iter();
802 let cap = match iter.size_hint() {
803 (_, Some(max)) => max,
804 (n, None) => n,
805 };
806 let mut vec = Self::with_capacity(cap);
807 for elem in iter {
808 unsafe { vec.push_unchecked(elem) };
809 }
810 vec
811 }
812}
813
814#[cfg(test)]
815mod test;