#![allow(incomplete_features)]
#![cfg_attr(feature = "nightly-const-generics", feature(generic_const_exprs))]

use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{fmt, ptr};
use core::slice;

mod raw;
use raw::RawVec;

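// The backing storage is a union of the two representations: a fixed inline
// buffer of `N` possibly-uninitialized elements, or a heap allocation managed
// by `RawVec`. Which field is live is tracked externally by `Length`, so the
// accessors below are `unsafe` and must only be used for the active variant.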
union TinyVecInner<T, const N: usize> {
    stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    raw: RawVec<T>,
}

impl<T, const N: usize> TinyVecInner<T, N> {
    unsafe fn as_ptr_stack(&self) -> *const T {
        unsafe { &self.stack as *const _ as *const T }
    }

    unsafe fn as_ptr_stack_mut(&mut self) -> *mut T {
        unsafe { &mut self.stack as *mut _ as *mut T }
    }

    unsafe fn as_ptr_heap(&self) -> *const T {
        unsafe { self.raw.ptr.as_ptr() }
    }

    unsafe fn as_ptr_heap_mut(&mut self) -> *mut T {
        unsafe { self.raw.ptr.as_ptr() }
    }
}

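// `Length` packs the element count and the storage tag into a single `usize`:
// the lowest bit is the tag (0 = inline, 1 = heap) and the remaining bits hold
// the length, which is why every accessor below shifts or scales by one bit.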
#[repr(transparent)]
struct Length(usize);

impl Length {
    const fn new_heap(len: usize) -> Self {
        Self(len << 1 | 0b1)
    }

    #[inline]
    const fn is_stack(&self) -> bool {
        (self.0 & 0b1) == 0
    }

    #[inline]
    const fn set_heap(&mut self) {
        self.0 |= 0b1;
    }

    #[inline]
    const fn set_stack(&mut self) {
        // Clear only the tag bit; `&= 0b0` would also discard the stored length.
        self.0 &= !0b1;
    }

    #[inline]
    const fn get(&self) -> usize {
        self.0 >> 1
    }

    #[inline]
    const fn add(&mut self, n: usize) {
        self.0 += n << 1;
    }

    #[inline]
    const fn sub(&mut self, n: usize) {
        self.0 -= n << 1;
    }
}

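// Number of `T` values that fit in the footprint of the heap representation
// (`RawVec<T>`); used as the default `N` when the nightly feature is enabled.
// Note that this divides by `size_of::<T>()`, so evaluating it for a
// zero-sized `T` fails to compile (division by zero in a const context).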
pub const fn n_elements_for_stack<T>() -> usize {
    mem::size_of::<RawVec<T>>() / mem::size_of::<T>()
}

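// On stable, `N` must be written out explicitly (e.g. `TinyVec<u32, 8>`); with
// the `nightly-const-generics` feature it defaults to `n_elements_for_stack`,
// sizing the inline buffer to the space the heap variant occupies anyway.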
pub struct TinyVec<
    T,
    #[cfg(not(feature = "nightly-const-generics"))] const N: usize,
    #[cfg(feature = "nightly-const-generics")] const N: usize = { n_elements_for_stack::<T>() },
> {
    inner: TinyVecInner<T, N>,
    len: Length,
}

impl<T, const N: usize> TinyVec<T, N> {
    fn ptr(&self) -> *const T {
        unsafe {
            if self.len.is_stack() {
                self.inner.as_ptr_stack()
            } else {
                self.inner.as_ptr_heap()
            }
        }
    }

    fn ptr_mut(&mut self) -> *mut T {
        unsafe {
            if self.len.is_stack() {
                self.inner.as_ptr_stack_mut()
            } else {
                self.inner.as_ptr_heap_mut()
            }
        }
    }

    /// Moves the inline elements into a fresh heap allocation with room for
    /// `n` additional elements, then marks the vector as heap-backed.
    unsafe fn switch_to_heap(&mut self, n: usize) {
        let mut vec = RawVec::new();
        vec.expand_if_needed(0, self.len.get() + n);
        unsafe {
            let src = self.inner.as_ptr_stack();
            let dst = vec.ptr.as_ptr();
            ptr::copy(src, dst, self.len.get());
            self.inner.raw = vec;
        }
        self.len.set_heap();
    }

    /// Moves the elements back into the inline buffer and releases the heap
    /// allocation. The caller must ensure `self.len.get() <= N`.
    unsafe fn switch_to_stack(&mut self) {
        let mut rv = unsafe { self.inner.raw };

        // The destination buffer must be mutable so we can write through it.
        let mut stack = [const { MaybeUninit::uninit() }; N];

        unsafe {
            let src = rv.ptr.as_ptr();
            let dst = stack.as_mut_ptr() as *mut T;
            ptr::copy(src, dst, self.len.get());

            rv.destroy();
        }

        self.inner.stack = ManuallyDrop::new(stack);
        self.len.set_stack();
    }
}

impl<T, const N: usize> TinyVec<T, N> {
    pub const fn new() -> Self {
        let stack = [const { MaybeUninit::uninit() }; N];
        Self {
            inner: TinyVecInner { stack: ManuallyDrop::new(stack) },
            len: Length(0),
        }
    }

    pub fn with_capacity(cap: usize) -> Self {
        let mut len = Length(0);
        let inner = if cap <= N {
            let s = [const { MaybeUninit::uninit() }; N];
            TinyVecInner { stack: ManuallyDrop::new(s) }
        } else {
            len.set_heap();
            TinyVecInner { raw: RawVec::with_capacity(cap) }
        };

        Self { inner, len }
    }

    #[inline]
    pub const fn len(&self) -> usize { self.len.get() }

    #[inline]
    pub const fn is_empty(&self) -> bool { self.len.get() == 0 }

    pub const fn capacity(&self) -> usize {
        if self.len.is_stack() {
            N
        } else {
            unsafe { self.inner.raw.cap }
        }
    }

    #[inline]
    pub const fn lives_on_stack(&self) -> bool { self.len.is_stack() }

    pub fn reserve(&mut self, n: usize) {
        if self.len.is_stack() {
            if self.len.get() + n > N {
                unsafe {
                    self.switch_to_heap(n);
                }
            }
        } else {
            unsafe {
                self.inner.raw.expand_if_needed(self.len.get(), n);
            }
        }
    }

    pub fn push(&mut self, elem: T) {
        self.reserve(1);
        unsafe { self.push_unchecked(elem); }
    }

    pub unsafe fn push_unchecked(&mut self, elem: T) {
        unsafe {
            let dst = self.ptr_mut().add(self.len.get());
            dst.write(elem);
        }
        self.len.add(1);
    }

    pub fn push_within_capacity(&mut self, val: T) -> Result<(), T> {
        if self.len.get() < self.capacity() {
            unsafe { self.push_unchecked(val); }
            Ok(())
        } else {
            Err(val)
        }
    }

    pub fn pop(&mut self) -> Option<T> {
        if self.len.get() == 0 {
            None
        } else {
            self.len.sub(1);
            let val = unsafe { self.ptr().add(self.len.get()).read() };
            Some(val)
        }
    }

    #[inline]
    pub fn insert(&mut self, index: usize, elem: T) -> Result<(), T> {
        if index > self.len.get() { return Err(elem) }
        unsafe { self.insert_unchecked(index, elem); }
        Ok(())
    }

    pub unsafe fn insert_unchecked(&mut self, index: usize, elem: T) {
        self.reserve(1);

        unsafe {
            ptr::copy(
                self.ptr().add(index),
                self.ptr_mut().add(index + 1),
                self.len.get() - index,
            );
            self.ptr_mut().add(index).write(elem);
        }
        self.len.add(1);
    }

    pub fn reserve_exact(&mut self, n: usize) {
        if self.len.is_stack() {
            if self.len.get() + n > N {
                unsafe { self.switch_to_heap(n); }
            }
        } else {
            let vec = unsafe { &mut self.inner.raw };
            let len = self.len.get();
            let new_cap = vec.cap.max(len + n);
            vec.expand_if_needed_exact(len, new_cap);
        }
    }

    pub unsafe fn remove_unchecked(&mut self, index: usize) -> T {
        debug_assert!(
            index < self.len.get(),
            "index {} is out of bounds (len {}); this would be undefined behavior",
            index,
            self.len.get()
        );

        unsafe {
            self.len.sub(1);
            let result = self.ptr_mut().add(index).read();
            ptr::copy(
                self.ptr().add(index + 1),
                self.ptr_mut().add(index),
                self.len.get() - index,
            );
            result
        }
    }

    #[inline]
    pub fn remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len.get() { return None }
        Some(unsafe { self.remove_unchecked(index) })
    }

    pub fn swap(&mut self, a: usize, b: usize) {
        self.swap_checked(a, b).unwrap_or_else(|| {
            panic!("index out of bounds")
        });
    }

    pub fn swap_checked(&mut self, a: usize, b: usize) -> Option<()> {
        if a >= self.len.get() {
            return None;
        }
        if b >= self.len.get() {
            return None;
        }
        unsafe { self.swap_unchecked(a, b); }
        Some(())
    }

    pub unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
        unsafe {
            let ap = self.ptr_mut().add(a);
            let bp = self.ptr_mut().add(b);
            let tmp = ap.read();
            ap.write(bp.read());
            bp.write(tmp);
        }
    }

    pub fn swap_remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len.get() {
            None
        } else if index == self.len.get() - 1 {
            self.pop()
        } else {
            unsafe { self.swap_unchecked(index, self.len.get() - 1); }
            self.pop()
        }
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        if self.len.is_stack() { return }

        if self.len.get() <= N {
            unsafe { self.switch_to_stack(); }
        } else {
            unsafe { self.inner.raw.shrink_to_fit(self.len.get()); }
        }
    }

    pub fn push_slice(&mut self, s: &[T])
    where
        T: Copy,
    {
        let len = s.len();
        let src = s.as_ptr();

        self.reserve(len);

        unsafe {
            ptr::copy(src, self.ptr_mut().add(self.len.get()), len);
        }

        self.len.add(len);
    }

    pub fn extend_from<I>(&mut self, it: I)
    where
        I: IntoIterator<Item = T>,
    {
        let it = it.into_iter();

        let (min, max) = it.size_hint();
        let reserve = match max {
            Some(max) => max,
            None => min,
        };

        if reserve > 0 {
            self.reserve(reserve);
        }

        for elem in it {
            // `size_hint` is only a hint and must not be trusted for memory
            // safety, so push through the checked path; the reservation above
            // still avoids repeated growth in the common case.
            self.push(elem);
        }
    }
}

impl<T, const N: usize> Default for TinyVec<T, N> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: fmt::Debug, const N: usize> fmt::Debug for TinyVec<T, N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self[..], f)
    }
}

impl<T: PartialEq, const N: usize> PartialEq for TinyVec<T, N> {
    fn eq(&self, other: &Self) -> bool {
        self.deref() == other.deref()
    }
}

impl<T, const N: usize> Deref for TinyVec<T, N> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        unsafe { slice::from_raw_parts(self.ptr(), self.len.get()) }
    }
}

impl<T, const N: usize> DerefMut for TinyVec<T, N> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { slice::from_raw_parts_mut(self.ptr_mut(), self.len.get()) }
    }
}

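// Dropping a `TinyVec` first drops the initialized elements in place (skipped
// entirely when `T` has no drop glue) and then, if the vector had spilled to
// the heap, releases the heap allocation.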
impl<T, const N: usize> Drop for TinyVec<T, N> {
    fn drop(&mut self) {
        if mem::needs_drop::<T>() {
            for e in self.deref_mut() {
                unsafe { ptr::drop_in_place(e) };
            }
        }
        if !self.len.is_stack() {
            unsafe { self.inner.raw.destroy(); }
        }
    }
}

impl<T, const N: usize> From<Vec<T>> for TinyVec<T, N> {
    fn from(value: Vec<T>) -> Self {
        let mut md = ManuallyDrop::new(value);
        // A `Vec`'s pointer is always non-null, even when nothing is allocated.
        let ptr = unsafe { NonNull::new_unchecked(md.as_mut_ptr()) };
        let inner = TinyVecInner {
            raw: RawVec {
                cap: md.capacity(),
                ptr,
            },
        };

        Self {
            inner,
            len: Length::new_heap(md.len()),
        }
    }
}

impl<T, const N: usize> FromIterator<T> for TinyVec<T, N> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let iter = iter.into_iter();
        let cap = match iter.size_hint() {
            (_, Some(max)) => max,
            (n, None) => n,
        };
        let mut vec = Self::with_capacity(cap);
        for elem in iter {
            // As in `extend_from`, the size hint is untrusted, so use the
            // checked push rather than `push_unchecked`.
            vec.push(elem);
        }
        vec
    }
}

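// A minimal inline smoke-test sketch of the push/pop/spill-to-heap flow; the
// full suite lives in the `test` module below. The element type and `N = 2`
// here are arbitrary choices for illustration.
#[cfg(test)]
mod smoke {
    use super::TinyVec;

    #[test]
    fn push_pop_and_spill() {
        let mut v: TinyVec<u32, 2> = TinyVec::new();
        assert!(v.lives_on_stack());
        assert!(v.is_empty());

        // Stays inline while it fits in the `N = 2` buffer.
        v.push(1);
        v.push(2);
        assert!(v.lives_on_stack());
        assert_eq!(v.len(), 2);

        // The third push exceeds `N`, so the elements move to the heap.
        v.push(3);
        assert!(!v.lives_on_stack());
        assert_eq!(&v[..], &[1, 2, 3]);

        assert_eq!(v.pop(), Some(3));
        assert_eq!(v.pop(), Some(2));
        assert_eq!(v.pop(), Some(1));
        assert_eq!(v.pop(), None);
    }
}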
#[cfg(test)]
mod test;