without_alloc/fixed_vec.rs
1//! Contains the `FixedVec` implementation.
2//!
3//! [See `FixedVec` for the main information][`FixedVec`].
4//!
5//! [`FixedVec`]: struct.FixedVec.html
6use core::{borrow, cmp, hash, iter, ops, ptr, slice};
7use crate::uninit::Uninit;
8
9/// A `Vec`-like structure that does not manage its allocation.
10///
11/// This vector type will never (re-)allocate but it can also not free underused memory. As opposed
/// to other similar crates, it does not require any additional bounds on its type parameter as it
13/// truly manages uninitialized memory to store instances.
14///
15/// # Basic Usage
16///
17/// It is easy to use a local array or slice of `MaybeUninit` for the storage of a `FixedVec`. Note
18/// that, similar to the allocated standard `Vec`, the underlying memory not being stored inline
19/// makes moves and splitting much cheaper.
20///
21/// ```
22/// use core::mem::MaybeUninit;
23/// use without_alloc::FixedVec;
24///
25/// let mut memory: [MaybeUninit<usize>; 15] = [MaybeUninit::uninit(); 15];
26/// let mut stack = FixedVec::new((&mut memory[..]).into());
27///
28/// stack.push(1);
29/// stack.push(2);
30/// stack.push(3);
31/// while let Some(top) = stack.pop() {
32/// // Prints 3, 2, 1
33/// println!("{}", top);
34/// }
35/// ```
36///
37/// ## With `Bump`
38///
39/// One focus of the library is composability. It should not be surprising that `FixedVec`
/// interacts with the [`LocalAlloc`] allocators, which implement a specialized interface
41/// providing the [`Uninit`] type instead of a raw `*const u8`, through an extension trait. Hence,
42/// the `FixedVec` can use this instead of its own local stack variables.
43///
44/// ```
45/// use static_alloc::Bump;
46/// use without_alloc::{FixedVec, alloc::LocalAllocLeakExt};
47///
48/// let alloc: Bump<[u8; 1 << 12]> = Bump::uninit();
49/// let some_usize = alloc.leak(0_usize).unwrap();
50///
51/// // Allocate a vector with capacity `1` from the slab.
52/// let mut vec = alloc.fixed_vec(1).unwrap();
53///
54/// // Push the reference to the other allocation.
55/// vec.push(&mut *some_usize);
56///
57/// // … do something else
58///
59/// // Ensure lifetimes work out.
60/// drop(vec);
61///
62/// // Hooray, now once again unborrowed.
63/// *some_usize = 0;
64/// ```
65///
66/// ## The [`from_unaligned`] constructor
67///
68/// It is possible to place a `FixedVec` into an uninitialized memory, not only the `Uninit<[T]>`
69/// that the [`new`] constructor requires. This will align the underlying memory suitably and
70/// substitute a dangling empty slice if that is not possible.
71///
72/// ```
73/// use core::mem::MaybeUninit;
74/// use without_alloc::{FixedVec, Uninit};
75///
76/// struct MyStruct {
77/// // ..
78/// # _private: [usize; 1],
79/// }
80///
81/// let mut memory: MaybeUninit<[u8; 1024]> = MaybeUninit::uninit();
82/// let uninit = Uninit::from(&mut memory);
83///
84/// // NO guarantees about space lost from required additional aligning.
85/// let mut vec: FixedVec<MyStruct> = FixedVec::from_unaligned(uninit);
86/// ```
87///
88/// [`Bump`]: ../slab/struct.Bump.html
89/// [`Uninit`]: ../uninit/struct.Uninit.html
90/// [`new`]: #method.new
91/// [`from_unaligned`]: #method.from_unaligned
pub struct FixedVec<'a, T> {
    /// The borrowed memory region serving as (partially uninitialized) storage.
    uninit: Uninit<'a, [T]>,
    /// Number of elements at the start of `uninit` that are initialized.
    /// Invariant: `length <= uninit.capacity()`.
    length: usize,
}
96
/// An iterator removing a range of elements from a `FixedVec`.
///
/// See [`FixedVec::drain`] for more information.
///
/// [`FixedVec::drain`]: struct.FixedVec.html#method.drain
// Internal invariant: `0 <= start <= end <= tail`. All offsets are relative to
// `elements` and inbounds of the vector's underlying allocation.
pub struct Drain<'a, T> {
    /// Number of elements drained from the start of the slice.
    start: usize,
    /// The end of the elements to drain (relative to `elements`), inbounds offset for `elements`.
    end: usize,
    /// The start of the tail of elements (relative to `elements`), inbounds offset for `elements`.
    tail: usize,
    /// The length of the tail.
    tail_len: usize,
    /// Pointer to first element to drain (and to write to on `Drop`).
    elements: ptr::NonNull<T>,
    /// The length field of the underlying `FixedVec`.
    len: &'a mut usize,
}
117
118impl<T> FixedVec<'_, T> {
    /// Shorten the vector to a maximum length.
    ///
    /// If the length is not larger than `len` this has no effect.
    ///
    /// The tail of the vector is dropped starting from the last element. This order is opposite to
    /// `.drain(len..).for_each(drop)`. `truncate` provides the extra guarantee that a `panic`
    /// during `Drop` of one element effectively stops the truncation at that point, instead of
    /// leaking unspecified other content of the vector. This means that other elements are still
    /// dropped when unwinding eventually drops the `FixedVec` itself.
    ///
    /// ## Example
    ///
    /// ```
    /// # use core::mem::MaybeUninit;
    /// # use without_alloc::{FixedVec, Uninit};
    ///
    /// let mut memory: [MaybeUninit<usize>; 4] = [MaybeUninit::uninit(); 4];
    /// let mut vec = FixedVec::new(Uninit::from(&mut memory[..]));
    ///
    /// vec.push(0usize);
    /// vec.push(1usize);
    /// vec.push(2usize);
    ///
    /// assert_eq!(vec.as_slice(), [0, 1, 2]);
    /// vec.truncate(1);
    /// assert_eq!(vec.as_slice(), [0]);
    /// ```
    pub fn truncate(&mut self, len: usize) {
        // Commits the current progress back to `self.length` even when one of
        // the element `Drop`s below panics. This is what keeps the not yet
        // dropped elements reachable for the eventual `Drop` of the vector.
        struct SetLenOnDrop<'a> {
            len: &'a mut usize,
            local_len: usize,
        }

        impl Drop for SetLenOnDrop<'_> {
            fn drop(&mut self) {
                *self.len = self.local_len;
            }
        }

        let mut ptr = self.end_mut_ptr();
        let current_length = self.length;
        let mut set_len = SetLenOnDrop { len: &mut self.length, local_len: current_length };

        // Drop from the back towards `len`. The local length is decremented
        // *before* each drop so that a panicking element is excluded from the
        // committed length (it is partially dropped) while all elements below
        // it remain part of the vector.
        for _ in len..current_length {
            set_len.local_len -= 1;

            unsafe {
                // SAFETY: `ptr` always points one past the element to drop,
                // within the initialized prefix; each element dropped once.
                ptr = ptr.offset(-1);
                ptr::drop_in_place(ptr);
            }
        }
    }
171
172 /// Extracts a slice containing the entire vector.
173 pub fn as_slice(&self) -> &[T] {
174 unsafe {
175 // SAFETY: length is the number of initialized elements.
176 slice::from_raw_parts(self.uninit.as_begin_ptr(), self.length)
177 }
178 }
179
180 /// Extracts the mutable slice containing the entire vector.
181 pub fn as_mut_slice(&mut self) -> &mut [T] {
182 unsafe {
183 // SAFETY:
184 // * length is the number of initialized elements.
185 // * unaliased since we take ourselves by `mut` and `uninit` does the rest.
186 slice::from_raw_parts_mut(self.uninit.as_begin_ptr(), self.length)
187 }
188 }
189
190 /// Remove all elements.
191 ///
192 /// This is an alias for [`truncate(0)`][truncate].
193 ///
194 /// [truncate]: #method.truncate
195 pub fn clear(&mut self) {
196 self.truncate(0)
197 }
198
199 /// Returns the number of elements in the vector.
200 pub fn len(&self) -> usize {
201 self.length
202 }
203
204 /// Set the raw length.
205 ///
206 /// ## Safety
207 /// * `new_len` must be smaller or equal `self.capacity()`
208 /// * The caller must ensure that all newly referenced elements are properly initialized.
209 pub unsafe fn set_len(&mut self, new_len: usize) {
210 self.length = new_len;
211 }
212
213 /// Returns the number of elements the vector can hold.
214 pub fn capacity(&self) -> usize {
215 self.uninit.capacity()
216 }
217
218 /// Returns `true` if the vector contains no elements.
219 pub fn is_empty(&self) -> bool {
220 self.length == 0
221 }
222
223 /// Appends an element to the back of a collection.
224 ///
225 /// Return `Err(val)` if it is not possible to append the element.
226 ///
227 /// ```
228 /// use core::mem::MaybeUninit;
229 /// use without_alloc::{FixedVec, Uninit};
230 ///
231 /// // Only enough storage for one element.
232 /// let mut allocation: [MaybeUninit<u32>; 1] = [MaybeUninit::uninit()];
233 /// let mut vec = FixedVec::new(Uninit::from(&mut allocation[..]));
234 ///
235 /// // First push succeeds.
236 /// assert_eq!(vec.push(1), Ok(()));
237 ///
238 /// // The second push fails.
239 /// assert_eq!(vec.push(2), Err(2));
240 ///
241 /// ```
242 pub fn push(&mut self, val: T) -> Result<(), T> {
243 if self.length == usize::max_value() {
244 return Err(val);
245 }
246
247 let init = match self.head_tail_mut().1.split_first() {
248 Some(init) => init,
249 None => return Err(val),
250 };
251
252 init.init(val);
253 self.length += 1;
254
255 Ok(())
256 }
257
258 /// Removes the last element from a vector and returns it, or `None` if it is empty.
259 pub fn pop(&mut self) -> Option<T> {
260 if self.length == 0 {
261 return None;
262 }
263
264 let last = self.head_tail_mut().0.split_last().unwrap();
265 let val = unsafe {
266 // SAFETY: initialized and no reference of any kind exists to it.
267 ptr::read(last.as_ptr())
268 };
269
270 self.length -= 1;
271 Some(val)
272 }
273
274 /// Split the capacity into a *borrowed* other vector.
275 ///
276 /// The other vector borrows the underlying memory resource while it is alive.
277 ///
278 /// This is a specialized method not found in the standard `Vec` as it relies on `FixedVec` not
279 /// owning the allocation itself. This avoids splitting the underlying allocation which would
280 /// require `unsafe` to mend the parts together.
281 ///
282 /// ## Panics
283 /// This method panics if `at > self.capacity()`.
284 ///
285 /// ## Examples
286 ///
287 /// ```
288 /// use without_alloc::{FixedVec, alloc::LocalAllocLeakExt};
289 /// use static_alloc::Bump;
290 ///
291 /// let mut memory: Bump<[usize; 8]> = Bump::uninit();
292 /// let mut vec = memory.fixed_vec::<usize>(8).unwrap();
293 /// vec.fill(0..7);
294 ///
295 /// // Can use like a vector:
296 /// let mut part = vec.split_borrowed(4);
297 /// assert!(part.push(7).is_ok());
298 /// assert!((4..8).eq(part.drain(..)));
299 ///
300 /// // Drop to rescind the borrow on `vec`.
301 /// drop(part);
302 ///
303 /// // All split elements are gone
304 /// assert_eq!(vec.len(), 4);
305 /// // But retained all capacity
306 /// assert_eq!(vec.capacity(), 8);
307 /// ```
308 #[must_use = "Elements in the split tail will be dropped. Prefer `truncate(at)` or `drain(at..)` if there is no other use."]
309 pub fn split_borrowed(&mut self, at: usize) -> FixedVec<'_, T> {
310 assert!(at <= self.capacity(), "`at` out of bounds");
311 let new_uninit = self.uninit.borrow_mut().split_at(at).unwrap();
312 let new_len = self.length.saturating_sub(at);
313 self.length -= new_len;
314 FixedVec {
315 uninit: new_uninit,
316 length: new_len,
317 }
318 }
319
320 /// Split the capacity of the collection into two at a given index.
321 ///
322 /// In contrast to `Vec::split_off` calling this method reduces the capacity of `self` to `at`.
323 ///
324 /// ## Panics
325 /// This method panics if `at > self.capacity()`.
326 pub fn split_and_shrink_to(&mut self, at: usize) -> Self {
327 assert!(at <= self.capacity(), "`at` out of bounds");
328 // `self.length` is always smaller than `split_at`.
329 let new_uninit = self.uninit.split_at(at).unwrap();
330 // The first `at` elements stay in this vec.
331 let new_len = self.length.saturating_sub(at);
332 self.length -= new_len;
333 FixedVec {
334 uninit: new_uninit,
335 length: new_len,
336 }
337 }
338
339 /// Extend the vector with as many elements as fit.
340 ///
341 /// Returns the iterator with all elements that were not pushed into the vector.
342 pub fn fill<I: IntoIterator<Item = T>>(&mut self, iter: I)
343 -> I::IntoIter
344 {
345 let unused = self.capacity() - self.len();
346 let mut iter = iter.into_iter();
347 for item in iter.by_ref().take(unused) {
348 unsafe {
349 // SAFETY:
350 // `capacity != len` so this is strictly in bounds. Also, this is behind the
351 // vector so there can not be any references to it currently.
352 ptr::write(self.end_mut_ptr(), item);
353 // SAFETY:
354 // Just initialized one more element past the end. By the way, this can not
355 // overflow since the result is at most `self.capacity()`.
356 self.set_len(self.len() + 1);
357 }
358 }
359 iter
360 }
361
    /// Creates a draining iterator that yields and removes elements a given range.
    ///
    /// It is unspecified which elements are removed if the `Drain` is never dropped. If you
    /// require precise semantics even in this case you might be able to swap the range to the back
    /// of the vector and invoke [`split_and_shrink_to`].
    ///
    /// [`split_and_shrink_to`]: #method.split_and_shrink_to
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
        where R: ops::RangeBounds<usize>
    {
        let len = self.len();
        // Resolve the generic bounds into a concrete half-open `[start, end)`
        // range over the initialized elements.
        let start = match range.start_bound() {
            ops::Bound::Included(&n) => n,
            ops::Bound::Excluded(&n) => n + 1,
            ops::Bound::Unbounded => 0,
        };
        let end = match range.end_bound() {
            ops::Bound::Included(&n) => n + 1,
            ops::Bound::Excluded(&n) => n,
            ops::Bound::Unbounded => len,
        };
        assert!(start <= end);
        assert!(end <= len);

        let elements = unsafe {
            // SAFETY:
            // Within allocation since `start <= len` and len is at most the
            // one-past-the-end pointer. Pointer within are also never null.
            //
            // In particular we can shorten the length. We initially shorten
            // the length until all elements are drained. The Drain will
            // increase the length by `len - end` elements which will still be
            // within the bounds of the allocation as `start <= end`.
            self.set_len(start);
            ptr::NonNull::new_unchecked(self.as_mut_ptr().add(start))
        };

        Drain {
            // Internal invariant: `start <= tail`.
            start: 0,
            // Relative to `elements`. inbounds of original `as_mut_ptr()`.
            end: end - start,
            tail: end - start,
            // Elements past the drained range; moved down on `Drop`.
            tail_len: len - end,
            elements,
            len: &mut self.length,
        }
    }
410
    /// Split the storage into the initialized head (`..length`) and the
    /// uninitialized tail (`length..`), both borrowed for temporary use.
    fn head_tail_mut(&mut self) -> (Uninit<'_, [T]>, Uninit<'_, [T]>) {
        // Borrow, do not affect the actual allocation by throwing away possible elements.
        let mut all = self.uninit.borrow_mut();
        // This must always be possible. `self.length` is never greater than the capacity.
        let tail = all.split_at(self.length).unwrap();
        (all, tail)
    }
418
419 /// Get a read-only pointer, valid for the initialized and uninitialized portion of the raw
420 /// vector.
421 pub fn as_ptr(&mut self) -> *const T {
422 self.uninit.as_begin_ptr()
423 }
424
425 /// Get a mutable pointer, valid for the initialized and uninitialized portion of the raw
426 /// vector.
427 pub fn as_mut_ptr(&mut self) -> *mut T {
428 self.uninit.as_begin_ptr()
429 }
430
431 fn end_mut_ptr(&mut self) -> *mut T {
432 unsafe { self.as_mut_ptr().add(self.length) }
433 }
434}
435
436impl<'a, T> FixedVec<'a, T> {
437 /// Create a `FixedVec` in a pre-allocated region.
438 ///
439 /// The capacity will be that of the underlying allocation.
440 pub fn new(uninit: Uninit<'a, [T]>) -> Self {
441 FixedVec {
442 uninit,
443 length: 0,
444 }
445 }
446
447 /// Create a `FixedVec` with as large of a capacity as available.
448 ///
449 /// When no aligned slice can be create within the provided memory then the constructor will
450 /// fallback to an empty dangling slice.
451 ///
452 /// This is only a utility function which may be lossy as data before the alignment is
453 /// discarded. Prefer an explicit [`Uninit::cast_slice`] followed by error handling if this is
454 /// undesirable.
455 ///
456 /// [`Uninit::cast_slice`]: ../uninit/struct.Uninit.html#method.cast_slice
457 pub fn from_unaligned<U: ?Sized>(generic: Uninit<'a, U>) -> Self {
458 let mut uninit = generic.as_memory();
459 let slice = uninit.split_slice().unwrap_or_else(Uninit::empty);
460 Self::new(slice)
461 }
462
463 /// Return trailing bytes that can not be used by the `FixedVec`.
464 ///
465 /// This operation is idempotent.
466 pub fn shrink_to_fit(&mut self) -> Uninit<'a, ()> {
467 self.uninit.shrink_to_fit()
468 }
469}
470
471impl<T> Drain<'_, T> {
472 /// View the remaining data as a slice.
473 ///
474 /// Similar to `slice::Iter::as_slice` but you are not allowed to use the iterator as it will
475 /// invalidate the pointees. This is an extended form of `Peekable::peek`.
476 pub fn as_slice(&self) -> &[T] {
477 unsafe {
478 // SAFETY: all indices up to `tail` are inbounds. Internal invariant guarantees `start`
479 // is smaller.
480 slice::from_raw_parts(
481 self.elements.as_ptr().add(self.start),
482 self.len())
483 }
484 }
485
486 /// View the remaining data as a mutable slice.
487 ///
488 /// This is `Peekable::peek` on steroids.
489 pub fn as_mut_slice(&mut self) -> &mut [T] {
490 unsafe {
491 // SAFETY: all indices up to `tail` are inbounds. Internal invariant guarantees `start`
492 // is smaller. Not aliased as it mutably borrows the `Drain`.
493 slice::from_raw_parts_mut(
494 self.elements.as_ptr().add(self.start),
495 self.len())
496 }
497 }
498
499 /// The count of remaining elements to drain.
500 pub fn len(&self) -> usize {
501 self.end - self.start
502 }
503
504 /// If there are any elements remaining.
505 pub fn is_empty(&self) -> bool {
506 self.start == self.end
507 }
508}
509
510impl<T> ops::Deref for FixedVec<'_, T> {
511 type Target = [T];
512 fn deref(&self) -> &[T] {
513 self.as_slice()
514 }
515}
516
517impl<T> ops::DerefMut for FixedVec<'_, T> {
518 fn deref_mut(&mut self) -> &mut [T] {
519 self.as_mut_slice()
520 }
521}
522
impl<T> Drop for FixedVec<'_, T> {
    fn drop(&mut self) {
        // Drop only the initialized prefix, in place and front-to-back. The
        // memory itself is merely borrowed and not freed here.
        unsafe {
            // SAFETY: `as_mut_slice` covers exactly the initialized elements.
            ptr::drop_in_place(self.as_mut_slice())
        }
    }
}
530
531impl<T, I> ops::Index<I> for FixedVec<'_, T>
532 where I: slice::SliceIndex<[T]>,
533{
534 type Output = I::Output;
535
536 fn index(&self, idx: I) -> &I::Output {
537 ops::Index::index(&**self, idx)
538 }
539}
540
541impl<T, I> ops::IndexMut<I> for FixedVec<'_, T>
542 where I: slice::SliceIndex<[T]>,
543{
544 fn index_mut(&mut self, idx: I) -> &mut I::Output {
545 ops::IndexMut::index_mut(&mut**self, idx)
546 }
547}
548
549impl<'a, 'b, T: PartialEq> PartialEq<FixedVec<'b, T>> for FixedVec<'a, T> {
550 #[inline]
551 fn eq(&self, other: &FixedVec<T>) -> bool {
552 PartialEq::eq(&**self, &**other)
553 }
554 #[inline]
555 fn ne(&self, other: &FixedVec<T>) -> bool {
556 PartialEq::ne(&**self, &**other)
557 }
558}
559
560impl<'a, 'b, T: PartialOrd> PartialOrd<FixedVec<'b, T>> for FixedVec<'a, T> {
561 #[inline]
562 fn partial_cmp(&self, other: &FixedVec<T>) -> Option<cmp::Ordering> {
563 PartialOrd::partial_cmp(&**self, &**other)
564 }
565 #[inline]
566 fn lt(&self, other: &FixedVec<T>) -> bool {
567 PartialOrd::lt(&**self, &**other)
568 }
569 #[inline]
570 fn le(&self, other: &FixedVec<T>) -> bool {
571 PartialOrd::le(&**self, &**other)
572 }
573 #[inline]
574 fn ge(&self, other: &FixedVec<T>) -> bool {
575 PartialOrd::ge(&**self, &**other)
576 }
577 #[inline]
578 fn gt(&self, other: &FixedVec<T>) -> bool {
579 PartialOrd::gt(&**self, &**other)
580 }
581}
582
583impl<T: Ord> Ord for FixedVec<'_, T> {
584 #[inline]
585 fn cmp(&self, other: &FixedVec<T>) -> cmp::Ordering {
586 Ord::cmp(&**self, &**other)
587 }
588}
589
// Reflexivity follows from the slice comparison of `PartialEq`.
impl<T: Eq> Eq for FixedVec<'_, T> { }
591
592impl<T: hash::Hash> hash::Hash for FixedVec<'_, T> {
593 fn hash<H: hash::Hasher>(&self, state: &mut H) {
594 hash::Hash::hash(&**self, state)
595 }
596}
597
598impl<T> borrow::Borrow<[T]> for FixedVec<'_, T> {
599 fn borrow(&self) -> &[T] {
600 &**self
601 }
602}
603
604impl<T> borrow::BorrowMut<[T]> for FixedVec<'_, T> {
605 fn borrow_mut(&mut self) -> &mut [T] {
606 &mut **self
607 }
608}
609
610impl<T> AsRef<[T]> for FixedVec<'_, T> {
611 fn as_ref(&self) -> &[T] {
612 &**self
613 }
614}
615
616impl<T> AsMut<[T]> for FixedVec<'_, T> {
617 fn as_mut(&mut self) -> &mut [T] {
618 &mut **self
619 }
620}
621
622impl<T> Iterator for Drain<'_, T> {
623 type Item = T;
624
625 fn next(&mut self) -> Option<T> {
626 if Drain::is_empty(self) {
627 return None;
628 }
629
630 let t = unsafe {
631 // SAFETY: `count <= self.tail` and `tail` is always in bounds.
632 ptr::read(self.elements.as_ptr().add(self.start))
633 };
634
635 self.start += 1;
636 Some(t)
637 }
638
639 fn size_hint(&self) -> (usize, Option<usize>) {
640 (self.start..self.end).size_hint()
641 }
642}
643
644impl<T> DoubleEndedIterator for Drain<'_, T> {
645 fn next_back(&mut self) -> Option<T> {
646 if Drain::is_empty(self) {
647 return None;
648 }
649
650 let t = unsafe {
651 // SAFETY: `end <= self.tail` and `tail` is always in bounds.
652 ptr::read(self.elements.as_ptr().add(self.end - 1))
653 };
654
655 self.end -= 1;
656 Some(t)
657 }
658}
659
660impl<T> ExactSizeIterator for Drain<'_, T> {
661 fn len(&self) -> usize {
662 Drain::len(self)
663 }
664}
665
// Once `start == end` the iterator keeps returning `None`, so it is fused.
impl<T> iter::FusedIterator for Drain<'_, T> { }
667
impl<T> Drop for Drain<'_, T> {
    fn drop(&mut self) {
        // Drop every element of the drained range that was not yet yielded.
        self.for_each(drop);

        if self.tail_len != 0 {
            unsafe {
                // SAFETY: `tail` and `elements` are both inbounds and the
                // `tail_len` elements fit below the original vector length.
                // `ptr::copy` permits the overlapping move.
                let source = self.elements.as_ptr().add(self.tail);
                ptr::copy(source, self.elements.as_ptr(), self.tail_len);
            }
            // Restore the tail to the vector.
            *self.len += self.tail_len;
        }
    }
}
682
683/// Extend the vector to the extent the allocation allows it.
684///
685/// Appends elements from the iterator and panics if the iterator contains more elements than the
686/// capacity of the vector. See [`fill`] for a specialized method that does not further exhaust the
687/// iterator and does not panic.
688///
689/// ## Examples
690///
691/// To avoid panicking you can limit the iterator to the remaining capacity. Depending on the
692/// underlying iterator it will exhaust itself further when the iterator itself is dropped.
693///
694/// ```
695/// # use core::mem::MaybeUninit;
696/// # use without_alloc::FixedVec;
697/// let mut memory: [MaybeUninit<usize>; 15] = [MaybeUninit::uninit(); 15];
698/// let mut source_vec = FixedVec::new((&mut memory[..]).into());
699/// source_vec.extend(0..15);
700///
701/// let mut memory: [MaybeUninit<usize>; 3] = [MaybeUninit::uninit(); 3];
702/// let mut target = FixedVec::new((&mut memory[..]).into());
703/// target.extend(source_vec.drain(..).take(3));
704///
705/// assert!(source_vec.is_empty());
706/// assert_eq!(target.len(), target.capacity());
707/// ```
708///
709/// If the iterator is not limited to the number of elements then this method will panic.
710///
711/// ```should_panic
712/// # use core::mem::MaybeUninit;
713/// # use without_alloc::FixedVec;
714/// let mut memory: [MaybeUninit<usize>; 3] = [MaybeUninit::uninit(); 3];
715/// let mut vec = FixedVec::new((&mut memory[..]).into());
716/// vec.extend(0..);
717/// ```
718///
719/// [`fill`]: #method.fill
720impl<T> iter::Extend<T> for FixedVec<'_, T> {
721 fn extend<I>(&mut self, iter: I)
722 where I: IntoIterator<Item=T>,
723 {
724 for _spill in self.fill(iter) {
725 panic!("FixedVec memory exhausted");
726 }
727 }
728}
729
#[cfg(test)]
mod tests {
    use super::FixedVec;
    use crate::Uninit;

    use core::cell::Cell;
    use core::mem::MaybeUninit;
    use core::sync::atomic::{AtomicUsize, Ordering};

    /// Test helper: counts regular drops and optionally panics in `Drop`.
    #[derive(Debug)]
    struct Trigger<'a> {
        panic_on_drop: bool,
        dropped_counter: &'a Cell<usize>,
    }

    impl Drop for Trigger<'_> {
        fn drop(&mut self) {
            if self.panic_on_drop { panic!("Trigger triggered") }
            // Record this as a normal drop.
            self.dropped_counter.set(self.dropped_counter.get() + 1);
        }
    }

    /// Test helper: aborts the process (via double panic) when the observed
    /// drop count does not match the expectation at the end of a test.
    struct AbortMismatchedDropCount<'a> {
        counter: &'a Cell<usize>,
        expected: usize,
    }

    impl Drop for AbortMismatchedDropCount<'_> {
        fn drop(&mut self) {
            struct ForceDupPanic;

            impl Drop for ForceDupPanic {
                fn drop(&mut self) { panic!() }
            }

            if self.expected != self.counter.get() {
                // For duplicate panic, and thus abort
                let _x = ForceDupPanic;
                panic!();
            }
        }
    }

    #[test]
    fn init_and_use() {
        #[derive(Clone, Copy, Debug, PartialEq, Eq)]
        struct Foo(usize);

        const CAPACITY: usize = 30;

        let mut allocation: [MaybeUninit<Foo>; 30] = [MaybeUninit::uninit(); 30];
        let mut vec = FixedVec::new((&mut allocation[..]).into());

        assert_eq!(vec.capacity(), CAPACITY);
        assert_eq!(vec.len(), 0);
        for i in 0..CAPACITY {
            assert_eq!(vec.push(Foo(i)), Ok(()));
        }

        assert_eq!(vec.capacity(), CAPACITY);
        assert_eq!(vec.len(), CAPACITY);

        // Pop returns elements in reverse insertion order.
        for i in (0..CAPACITY).rev() {
            assert_eq!(vec.pop(), Some(Foo(i)));
        }

        assert_eq!(vec.capacity(), CAPACITY);
        assert_eq!(vec.len(), 0);
    }

    // NOTE(review): despite the name, `HasDrop` is not zero-sized; this test
    // checks that dropping the vector drops every stored element exactly once.
    #[test]
    fn zst_drop() {
        const COUNT: usize = 30;
        static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);
        struct HasDrop(usize);

        impl Drop for HasDrop {
            fn drop(&mut self) {
                DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
            }
        }


        let mut allocation: MaybeUninit<[HasDrop; COUNT]> = MaybeUninit::uninit();
        let uninit = Uninit::from_maybe_uninit(&mut allocation);
        let mut vec = FixedVec::new(uninit.cast_slice().unwrap());

        for i in 0..COUNT {
            assert!(vec.push(HasDrop(i)).is_ok());
        }

        drop(vec);
        assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), COUNT);
    }

    // A vector of zero-sized elements has the maximal possible capacity.
    #[test]
    fn zst() {
        struct Zst;
        let vec = FixedVec::<Zst>::new(Uninit::empty());
        assert_eq!(vec.capacity(), usize::max_value());
    }

    #[test]
    fn split_and_shrink() {
        // Zeroed because we want to test the contents.
        let mut allocation: MaybeUninit<[u16; 8]> = MaybeUninit::zeroed();

        let mut aligned = Uninit::from(&mut allocation).as_memory();
        let _ = aligned.split_at_byte(15);

        let mut vec = FixedVec::new(aligned.cast_slice().unwrap());
        let mut second = vec.split_and_shrink_to(4);
        let tail = second.shrink_to_fit();

        assert_eq!(vec.capacity(), 4);
        assert_eq!(vec.shrink_to_fit().size(), 0);
        assert_eq!(second.capacity(), 3);
        assert_eq!(second.shrink_to_fit().size(), 0);
        assert_eq!(tail.size(), 1);

        let _ = tail.cast::<u8>().unwrap().init(0xFF);
        (0_u16..4).for_each(|v| assert!(vec.push(v).is_ok()));
        (4..7).for_each(|v| assert!(second.push(v).is_ok()));

        assert_eq!(vec.len(), 4);
        assert_eq!(second.len(), 3);

        drop(vec);
        drop(second);
        assert_eq!(
            &unsafe { *allocation.as_ptr() }[..7],
            [0, 1, 2, 3, 4, 5, 6]);
    }

    /// Tests panics during truncation behave as expected.
    ///
    /// Unwinding started in a panic during truncation should not affect `Drop` calls when the
    /// `Vec` itself is hit by the unwinding. We test this by voluntarily triggering an unwinding
    /// and counting the number of values which have been dropped regularly (that is, during the
    /// `Drop` of `Vec` when it is unwound).
    ///
    /// Note that this test is already `should_panic` and the observable failure is thus an abort
    /// from a double panic!
    #[test]
    #[should_panic = "Trigger triggered"]
    fn drop_safe_in_truncation() {
        let mut allocation: MaybeUninit<[Trigger<'static>; 3]> = MaybeUninit::zeroed();
        let drops = Cell::new(0);

        // Is `Drop`ed *after* the Vec, and will record the number of usually dropped Triggers.
        let _abort_mismatch_raii = AbortMismatchedDropCount {
            counter: &drops,
            expected: 2,
        };

        let uninit = Uninit::from(&mut allocation).as_memory();
        let mut vec = FixedVec::new(uninit.cast_slice().unwrap());

        vec.push(Trigger { panic_on_drop: false, dropped_counter: &drops }).unwrap();
        // This one is within the truncated tail but is not dropped until unwind as truncate
        // panics. If we were to skip dropping all values of the tail in unwind we'd notice.
        vec.push(Trigger { panic_on_drop: false, dropped_counter: &drops }).unwrap();
        vec.push(Trigger { panic_on_drop: true, dropped_counter: &drops }).unwrap();

        // Trigger!
        vec.truncate(1);
    }

    #[test]
    fn fill_drops() {
        let mut allocation: MaybeUninit<[Trigger<'static>; 2]> = MaybeUninit::zeroed();
        let drops = Cell::new(0);

        // Is `Drop`ed *after* the Vec, and will record the number of usually dropped Triggers.
        let _abort_mismatch_raii = AbortMismatchedDropCount {
            counter: &drops,
            expected: 2
        };

        let uninit = Uninit::from(&mut allocation).as_memory();
        let mut vec = FixedVec::new(uninit.cast_slice().unwrap());

        vec.push(Trigger { panic_on_drop: false, dropped_counter: &drops }).unwrap();
        // This should fill the single remaining slot in the Vec. Only one element is
        // instantiated.
        let _ = vec.fill(core::iter::repeat_with(
            || Trigger { panic_on_drop: false, dropped_counter: &drops }));
    }
}
919}