// tiny_vec/lib.rs

1/*  Copyright (C) 2025 Saúl Valdelvira
2 *
3 *  This program is free software: you can redistribute it and/or modify
4 *  it under the terms of the GNU General Public License as published by
5 *  the Free Software Foundation, version 3.
6 *
7 *  This program is distributed in the hope that it will be useful,
8 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
9 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
10 *  GNU General Public License for more details.
11 *
12 *  You should have received a copy of the GNU General Public License
13 *  along with this program.  If not, see <https://www.gnu.org/licenses/>. */
14
15use core::mem::{self, ManuallyDrop, MaybeUninit};
16use core::ops::{Deref, DerefMut};
17use core::ptr::NonNull;
18use core::{fmt, ptr};
19use core::slice;
20
21mod raw;
22use raw::{next_cap, RawVec};
23
/// Backing storage for [TinyVec]: either an inline array of
/// possibly-uninitialized slots, or a heap allocation managed by
/// [RawVec].
///
/// The active field is NOT recorded here; `TinyVec` tracks it through
/// the tag bit of its `Length` field, so reading either field is only
/// sound when that tag says it is the live one.
union TinyVecInner<T, const N: usize> {
    stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    raw: RawVec<T>,
}
28
29impl<T, const N: usize> TinyVecInner<T, N> {
30    unsafe fn as_ptr_stack(&self) -> *const T {
31        unsafe { &self.stack as *const _ as *const T }
32    }
33
34    unsafe fn as_ptr_stack_mut(&mut self) -> *mut T {
35        unsafe { &mut self.stack as *mut _ as *mut T }
36    }
37
38    unsafe fn as_ptr_heap(&self) -> *const T {
39        unsafe { self.raw.ptr.as_ptr() }
40    }
41
42    unsafe fn as_ptr_heap_mut(&mut self) -> *mut T {
43        unsafe { self.raw.ptr.as_ptr() }
44    }
45}
46
/// Element count packed together with a one-bit stack/heap tag.
///
/// The tag lives in the least-significant bit (`0` = inline/stack,
/// `1` = heap); the actual length occupies the remaining bits.
#[repr(transparent)]
struct Length(usize);

impl Length {
    /// Bit that marks the vector as heap-allocated.
    const HEAP_BIT: usize = 0b1;
    /// Distance the count is shifted to make room for the tag.
    const SHIFT: u32 = 1;

    /// Builds a `Length` of `len` elements, tagged as heap-allocated.
    const fn new_heap(len: usize) -> Self {
        Self((len << Self::SHIFT) | Self::HEAP_BIT)
    }

    /// Is the vector still using its inline (stack) buffer?
    #[inline]
    const fn is_stack(&self) -> bool {
        (self.0 & Self::HEAP_BIT) == 0
    }

    /// Marks the vector as heap-allocated, keeping the count intact.
    #[inline]
    const fn set_heap(&mut self) {
        self.0 |= Self::HEAP_BIT;
    }

    /// The element count, with the tag bit stripped off.
    #[inline]
    const fn get(&self) -> usize {
        self.0 >> Self::SHIFT
    }

    /// Increments the count by `n` without touching the tag.
    #[inline]
    const fn add(&mut self, n: usize) {
        self.0 += n << Self::SHIFT;
    }

    /// Decrements the count by `n` without touching the tag.
    #[inline]
    const fn sub(&mut self, n: usize) {
        self.0 -= n << Self::SHIFT;
    }
}
80
/// A vector that stores up to `N` elements inline (on the stack) and
/// transparently moves to a heap allocation once it grows past that.
pub struct TinyVec<T, const N: usize> {
    /// Inline buffer or heap allocation; which field is active is
    /// tracked by the tag bit inside `len`.
    inner: TinyVecInner<T, N>,
    /// Element count plus the stack/heap tag bit.
    len: Length,
}
85
86impl<T, const N: usize> TinyVec<T, N> {
87    fn ptr(&self) -> *const T {
88        unsafe {
89            if self.len.is_stack() {
90                self.inner.as_ptr_stack()
91            } else {
92                self.inner.as_ptr_heap()
93            }
94        }
95    }
96
97    fn ptr_mut(&mut self) -> *mut T {
98        unsafe {
99            if self.len.is_stack() {
100                self.inner.as_ptr_stack_mut()
101            } else {
102                self.inner.as_ptr_heap_mut()
103            }
104        }
105    }
106
107    unsafe fn switch(&mut self, n: usize) {
108        let cap = next_cap(self.len.get() + n);
109        let vec = RawVec::with_capacity(cap);
110        unsafe {
111            let src = self.inner.as_ptr_stack();
112            let dst = vec.ptr.as_ptr();
113            ptr::copy(
114                src,
115                dst,
116                self.len.get()
117            );
118            self.inner.raw = vec;
119        }
120        self.len.set_heap();
121    }
122}
123
124impl<T, const N: usize> TinyVec<T, N> {
125    pub const fn new() -> Self {
126        let stack = [ const { MaybeUninit::uninit() }; N ];
127        Self {
128            inner: TinyVecInner { stack: ManuallyDrop::new(stack) },
129            len: Length(0),
130        }
131    }
132
133    pub fn with_capacity(cap: usize) -> Self {
134        let mut len = Length(0);
135        let inner = if cap <= N {
136            let s = [ const { MaybeUninit::uninit() }; N ];
137            TinyVecInner { stack: ManuallyDrop::new(s) }
138        } else {
139            len.set_heap();
140            TinyVecInner { raw: RawVec::with_capacity(cap) }
141        };
142
143        Self { inner, len }
144    }
145
146    /// Returns the number of elements inside this vec
147    #[inline]
148    pub const fn len(&self) -> usize { self.len.get() }
149
150    /// Returns true if the vector is empty
151    #[inline]
152    pub const fn is_empty(&self) -> bool { self.len.get() == 0 }
153
154    /// Returns the allocated capacity for this vector
155    pub const fn capacity(&self) -> usize {
156        if self.len.is_stack() {
157            N
158        } else {
159            unsafe { self.inner.raw.cap }
160        }
161    }
162
163    /// Reserves space for, at least, n elements
164    pub fn reserve(&mut self, n: usize) {
165        if self.len.is_stack() {
166            if self.len.get() + n > N {
167                unsafe {
168                    self.switch(n);
169                }
170            }
171        } else {
172            unsafe {
173                self.inner.raw.expand_if_needed(self.len.get(), n);
174            }
175        }
176    }
177
178    /// Appends an element to the back of the vector
179    pub fn push(&mut self, elem: T) {
180        self.reserve(1);
181        unsafe {
182            let dst = self.ptr_mut().add(self.len.get());
183            dst.write(elem);
184        }
185        self.len.add(1);
186    }
187
188    /// Try to push an element inside the vector, only if
189    /// there's room for it. If the push would've caused a
190    /// reallocation of the buffer, returns the value.
191    pub fn push_within_capacity(&mut self, val: T) -> Result<(),T> {
192        if self.len.get() < self.capacity() {
193            unsafe {
194                // TODO CHECK
195                self.ptr_mut().add(self.len.get()).write(val)
196            }
197            self.len.add(1);
198            Ok(())
199        } else {
200            Err(val)
201        }
202    }
203    /// Removes the last element of this vector (if present)
204    pub fn pop(&mut self) -> Option<T> {
205        if self.len.get() == 0 {
206            None
207        } else {
208            self.len.sub(1);
209            let val =
210            unsafe {
211                self.ptr().add(self.len.get()).read()
212            };
213            Some(val)
214        }
215    }
216
217    /// Inserts an element in the given index position
218    #[inline]
219    pub fn insert(&mut self, index: usize, elem: T) -> Result<(),T> {
220        if index > self.len.get() { return Err(elem) }
221        unsafe { self.insert_unckecked(index, elem); }
222        Ok(())
223    }
224
225    /// # Safety
226    ///
227    /// The index should be valid.
228    pub unsafe fn insert_unckecked(&mut self, index: usize, elem: T) {
229        self.reserve(1);
230
231        unsafe {
232            ptr::copy(
233                self.ptr().add(index),
234                self.ptr_mut().add(index + 1),
235                self.len.get() - index,
236            );
237            self.ptr_mut().add(index).write(elem);
238        }
239        self.len.add(1);
240    }
241
242    /// Reserves space for exactly n elements more
243    pub fn reserve_exact(&mut self, n: usize) {
244        if self.len.is_stack() {
245            if self.len.get() + n > N {
246                unsafe { self.switch(n); }
247            }
248        } else {
249            let vec = unsafe { &mut self.inner.raw };
250            let new_cap = vec.cap.max(self.len.get() + n);
251            vec.expand(new_cap);
252        }
253    }
254
255    /// # Safety
256    /// Index should be < Vec::len()
257    pub unsafe fn remove_unchecked(&mut self, index: usize) -> T {
258        debug_assert!(index < self.len.get(), "Index is >= than {}, this will trigger UB", self.len.get());
259
260        unsafe {
261            self.len.sub(1);
262            let result = self.ptr_mut().add(index).read();
263            ptr::copy(
264                self.ptr().add(index + 1),
265                self.ptr_mut().add(index),
266                self.len.get() - index,
267            );
268            result
269        }
270    }
271
272    #[inline]
273    pub fn remove(&mut self, index: usize) -> Option<T> {
274        if index >= self.len.get() { return None }
275        /* Safety: We've just checked the invariant. Index will always
276         * be < self.len, so it's 100% safe to call this function */
277        Some( unsafe { self.remove_unchecked(index) } )
278    }
279
280    #[inline]
281    pub fn shrink_to_fit(&mut self) {
282        if !self.len.is_stack() {
283            unsafe { self.inner.raw.shrink_to_fit(self.len.get()); }
284        }
285    }
286
287    pub fn push_slice(&mut self, s: &[T]) {
288        let len = s.len();
289        let src = s.as_ptr();
290
291        self.reserve(len);
292
293        unsafe {
294            ptr::copy(
295                src,
296                self.ptr_mut().add(self.len.get()),
297                len
298            )
299        }
300
301        self.len.add(len);
302    }
303
304    pub fn extend_from<I>(&mut self, it: I)
305    where
306        I: IntoIterator<Item = T>,
307    {
308        let it = it.into_iter();
309
310        let (min, max) = it.size_hint();
311        let reserve = match max {
312            Some(max) => max,
313            None => min,
314        };
315
316        if reserve > 0 {
317            self.reserve(reserve);
318        }
319
320        for elem in it {
321            self.push(elem);
322        }
323    }
324}
325
326impl<T, const N: usize> Default for TinyVec<T, N> {
327    fn default() -> Self {
328        Self::new()
329    }
330}
331
332impl<T: fmt::Debug, const N: usize> fmt::Debug for TinyVec<T, N> {
333    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
334        fmt::Debug::fmt(&self[..], f)
335    }
336}
337
338impl<T: PartialEq, const N: usize> PartialEq for TinyVec<T, N> {
339    fn eq(&self, other: &Self) -> bool {
340        self.deref() == other.deref()
341    }
342}
343
344impl<T, const N: usize> Deref for TinyVec<T, N> {
345    type Target = [T];
346
347    fn deref(&self) -> &Self::Target {
348        unsafe { slice::from_raw_parts(self.ptr(), self.len.get()) }
349    }
350}
351
352
353impl<T, const N: usize> DerefMut for TinyVec<T, N> {
354    fn deref_mut(&mut self) -> &mut Self::Target {
355        unsafe { slice::from_raw_parts_mut(self.ptr_mut(), self.len.get()) }
356    }
357}
358
359impl<T, const N: usize> Drop for TinyVec<T, N> {
360    fn drop(&mut self) {
361        if mem::needs_drop::<T>() {
362            for e in self.deref_mut() {
363                unsafe { ptr::drop_in_place(e) };
364            }
365        }
366        if !self.len.is_stack() {
367            unsafe { self.inner.raw.destroy(); }
368        }
369    }
370}
371
372impl<T, const N: usize> From<Vec<T>> for TinyVec<T, N> {
373    fn from(value: Vec<T>) -> Self {
374        let mut md = ManuallyDrop::new(value);
375        let ptr = unsafe { NonNull::new_unchecked( md.as_mut_ptr() ) };
376        let inner = TinyVecInner {
377            raw: RawVec {
378                cap: md.capacity(),
379                ptr,
380            }
381        };
382
383        Self {
384            inner,
385            len: Length::new_heap(md.len()),
386        }
387    }
388}
389
390#[cfg(test)]
391mod test;