#![allow(incomplete_features)]
#![cfg_attr(feature = "nightly-const-generics", feature(generic_const_exprs))]

use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{fmt, ptr, slice};

mod raw;
use raw::{next_cap, RawVec};

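/// Backing storage: either an inline array of `N` possibly-uninitialized
/// slots or a heap allocation. Which field is live is tracked externally
/// by [`Length`], never by the union itself.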
union TinyVecInner<T, const N: usize> {
    stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    raw: RawVec<T>,
}

impl<T, const N: usize> TinyVecInner<T, N> {
    unsafe fn as_ptr_stack(&self) -> *const T {
        unsafe { &self.stack as *const _ as *const T }
    }

    unsafe fn as_ptr_stack_mut(&mut self) -> *mut T {
        unsafe { &mut self.stack as *mut _ as *mut T }
    }

    unsafe fn as_ptr_heap(&self) -> *const T {
        unsafe { self.raw.ptr.as_ptr() }
    }

    unsafe fn as_ptr_heap_mut(&mut self) -> *mut T {
        unsafe { self.raw.ptr.as_ptr() }
    }
}

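/// The logical length plus a storage tag packed into the lowest bit:
/// `0` means inline ("stack"), `1` means heap. For example, a heap vector
/// of length 3 stores `3 << 1 | 1 == 0b111`.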
#[repr(transparent)]
struct Length(usize);

impl Length {
    const fn new_heap(len: usize) -> Self {
        Self(len << 1 | 0b1)
    }

    #[inline]
    const fn is_stack(&self) -> bool {
        (self.0 & 0b1) == 0
    }

    #[inline]
    const fn set_heap(&mut self) {
        self.0 |= 0b1;
    }

    #[inline]
    const fn get(&self) -> usize {
        self.0 >> 1
    }

    #[inline]
    const fn add(&mut self, n: usize) {
        self.0 += n << 1;
    }

    #[inline]
    const fn sub(&mut self, n: usize) {
        self.0 -= n << 1;
    }
}

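/// Number of `T`s that fit in the footprint of the heap representation,
/// used as the default inline capacity. For example, on a 64-bit target
/// `RawVec<u32>` is a pointer plus a capacity (16 bytes), giving `u32` a
/// default inline capacity of 4.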
pub const fn n_elements_for_stack<T>() -> usize {
    // Guard zero-sized types, which would otherwise divide by zero here.
    if mem::size_of::<T>() == 0 {
        0
    } else {
        mem::size_of::<RawVec<T>>() / mem::size_of::<T>()
    }
}

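/// A vector that keeps up to `N` elements inline and spills to the heap once
/// that capacity is exceeded. With the `nightly-const-generics` feature, `N`
/// defaults to however many elements fit in the heap representation itself,
/// so the inline buffer adds no size overhead.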
pub struct TinyVec<
    T,
    #[cfg(not(feature = "nightly-const-generics"))]
    const N: usize,
    #[cfg(feature = "nightly-const-generics")]
    const N: usize = { n_elements_for_stack::<T>() },
> {
    inner: TinyVecInner<T, N>,
    len: Length,
}

impl<T, const N: usize> TinyVec<T, N> {
    fn ptr(&self) -> *const T {
        unsafe {
            if self.len.is_stack() {
                self.inner.as_ptr_stack()
            } else {
                self.inner.as_ptr_heap()
            }
        }
    }

    fn ptr_mut(&mut self) -> *mut T {
        unsafe {
            if self.len.is_stack() {
                self.inner.as_ptr_stack_mut()
            } else {
                self.inner.as_ptr_heap_mut()
            }
        }
    }

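    /// Moves the inline contents into a fresh heap allocation with room for
    /// at least `len + n` elements and tags the length as heap.
    ///
    /// # Safety
    /// Must only be called while the contents are still inline.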
    unsafe fn switch(&mut self, n: usize) {
        let cap = next_cap(self.len.get() + n);
        let vec = RawVec::with_capacity(cap);
        unsafe {
            let src = self.inner.as_ptr_stack();
            let dst = vec.ptr.as_ptr();
            // A fresh heap allocation can never overlap the inline buffer.
            ptr::copy_nonoverlapping(src, dst, self.len.get());
            self.inner.raw = vec;
        }
        self.len.set_heap();
    }
}

impl<T, const N: usize> TinyVec<T, N> {
    pub const fn new() -> Self {
        let stack = [const { MaybeUninit::uninit() }; N];
        Self {
            inner: TinyVecInner { stack: ManuallyDrop::new(stack) },
            len: Length(0),
        }
    }

    pub fn with_capacity(cap: usize) -> Self {
        let mut len = Length(0);
        let inner = if cap <= N {
            let s = [const { MaybeUninit::uninit() }; N];
            TinyVecInner { stack: ManuallyDrop::new(s) }
        } else {
            len.set_heap();
            TinyVecInner { raw: RawVec::with_capacity(cap) }
        };

        Self { inner, len }
    }

    #[inline]
    pub const fn len(&self) -> usize { self.len.get() }

    #[inline]
    pub const fn is_empty(&self) -> bool { self.len.get() == 0 }

    pub const fn capacity(&self) -> usize {
        if self.len.is_stack() {
            N
        } else {
            unsafe { self.inner.raw.cap }
        }
    }

    pub fn reserve(&mut self, n: usize) {
        if self.len.is_stack() {
            if self.len.get() + n > N {
                unsafe {
                    self.switch(n);
                }
            }
        } else {
            unsafe {
                self.inner.raw.expand_if_needed(self.len.get(), n);
            }
        }
    }

    pub fn push(&mut self, elem: T) {
        self.reserve(1);
        unsafe {
            let dst = self.ptr_mut().add(self.len.get());
            dst.write(elem);
        }
        self.len.add(1);
    }

    pub fn push_within_capacity(&mut self, val: T) -> Result<(), T> {
        if self.len.get() < self.capacity() {
            unsafe {
                self.ptr_mut().add(self.len.get()).write(val);
            }
            self.len.add(1);
            Ok(())
        } else {
            Err(val)
        }
    }

    pub fn pop(&mut self) -> Option<T> {
        if self.len.get() == 0 {
            None
        } else {
            self.len.sub(1);
            let val = unsafe { self.ptr().add(self.len.get()).read() };
            Some(val)
        }
    }

    #[inline]
    pub fn insert(&mut self, index: usize, elem: T) -> Result<(), T> {
        if index > self.len.get() { return Err(elem) }
        unsafe { self.insert_unchecked(index, elem) };
        Ok(())
    }

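    /// # Safety
    /// `index` must be less than or equal to `self.len()`.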
    pub unsafe fn insert_unchecked(&mut self, index: usize, elem: T) {
        self.reserve(1);

        unsafe {
            // Shift the tail one slot right, then write into the gap.
            ptr::copy(
                self.ptr().add(index),
                self.ptr_mut().add(index + 1),
                self.len.get() - index,
            );
            self.ptr_mut().add(index).write(elem);
        }
        self.len.add(1);
    }

    pub fn reserve_exact(&mut self, n: usize) {
        if self.len.is_stack() {
            if self.len.get() + n > N {
                unsafe { self.switch(n); }
            }
        } else {
            let vec = unsafe { &mut self.inner.raw };
            let new_cap = vec.cap.max(self.len.get() + n);
            vec.expand(new_cap);
        }
    }

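    /// # Safety
    /// `index` must be less than `self.len()`.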
    pub unsafe fn remove_unchecked(&mut self, index: usize) -> T {
        debug_assert!(
            index < self.len.get(),
            "index {index} is out of bounds (len {}); this is UB in release builds",
            self.len.get(),
        );

        unsafe {
            self.len.sub(1);
            let result = self.ptr_mut().add(index).read();
            // Shift the tail one slot left to close the gap.
            ptr::copy(
                self.ptr().add(index + 1),
                self.ptr_mut().add(index),
                self.len.get() - index,
            );
            result
        }
    }

    #[inline]
    pub fn remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len.get() { return None }
        Some(unsafe { self.remove_unchecked(index) })
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        if !self.len.is_stack() {
            unsafe { self.inner.raw.shrink_to_fit(self.len.get()); }
        }
    }

    pub fn push_slice(&mut self, s: &[T])
    where
        T: Copy,
    {
        let len = s.len();
        let src = s.as_ptr();

        self.reserve(len);

        unsafe {
            // `T: Copy` makes this bitwise copy sound; without the bound the
            // elements would be duplicated and eventually dropped twice.
            ptr::copy_nonoverlapping(src, self.ptr_mut().add(self.len.get()), len);
        }

        self.len.add(len);
    }

    pub fn extend_from<I>(&mut self, it: I)
    where
        I: IntoIterator<Item = T>,
    {
        let it = it.into_iter();

        // Reserve the reported upper bound when there is one, otherwise the
        // lower bound; `push` still grows the buffer if the hint was short.
        let (min, max) = it.size_hint();
        let reserve = match max {
            Some(max) => max,
            None => min,
        };

        if reserve > 0 {
            self.reserve(reserve);
        }

        for elem in it {
            self.push(elem);
        }
    }
}

impl<T, const N: usize> Default for TinyVec<T, N> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: fmt::Debug, const N: usize> fmt::Debug for TinyVec<T, N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self[..], f)
    }
}

impl<T: PartialEq, const N: usize> PartialEq for TinyVec<T, N> {
    fn eq(&self, other: &Self) -> bool {
        self.deref() == other.deref()
    }
}

impl<T, const N: usize> Deref for TinyVec<T, N> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        unsafe { slice::from_raw_parts(self.ptr(), self.len.get()) }
    }
}

impl<T, const N: usize> DerefMut for TinyVec<T, N> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { slice::from_raw_parts_mut(self.ptr_mut(), self.len.get()) }
    }
}

impl<T, const N: usize> Drop for TinyVec<T, N> {
    fn drop(&mut self) {
        // Drop the initialized elements in place, then release the heap
        // buffer if one was ever allocated.
        if mem::needs_drop::<T>() {
            for e in self.deref_mut() {
                unsafe { ptr::drop_in_place(e) };
            }
        }
        if !self.len.is_stack() {
            unsafe { self.inner.raw.destroy(); }
        }
    }
}

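// Adopting a `Vec`'s buffer is O(1): the pointer and capacity are reused
// as-is and the length is tagged as heap.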
impl<T, const N: usize> From<Vec<T>> for TinyVec<T, N> {
    fn from(value: Vec<T>) -> Self {
        let mut md = ManuallyDrop::new(value);
        let ptr = unsafe { NonNull::new_unchecked(md.as_mut_ptr()) };
        let inner = TinyVecInner {
            raw: RawVec {
                cap: md.capacity(),
                ptr,
            },
        };

        Self {
            inner,
            len: Length::new_heap(md.len()),
        }
    }
}

#[cfg(test)]
mod test;