// bump_stack/lib.rs

1#![doc = include_str!("../README.md")]
2#![no_std]
3
4mod chunk;
5mod iter;
6mod util;
7
8extern crate alloc;
9
10use crate::chunk::*;
11use crate::util::TypeProps;
12use alloc::alloc::{Layout, alloc, dealloc, handle_alloc_error};
13use core::cell::Cell;
14use core::iter::DoubleEndedIterator;
15use core::marker::PhantomData;
16use core::ptr::{self, NonNull};
17
/// A LIFO stack that stores its elements in a linked list of bump-allocated
/// memory chunks.
///
/// Elements are written into the current chunk; when it fills up, a new (or
/// cached) chunk is linked in. Interior mutability (`Cell`) lets `push` work
/// through a shared reference.
pub struct Stack<T> {
    /// The current chunk we are bump allocating within.
    ///
    /// Its `next` link can point to the dead chunk, or to the cached chunk.
    ///
    /// Its `prev` link can point to the dead chunk or to the earlier allocated
    /// chunk.
    current_footer: Cell<NonNull<ChunkFooter>>,

    /// The first chunk we allocated, or the dead chunk if we haven't allocated
    /// anything yet.
    first_footer: Cell<NonNull<ChunkFooter>>,

    /// The capacity of the stack in elements.
    ///
    /// Stays `0` for zero-sized `T`; [`Stack::capacity`] special-cases ZSTs.
    capacity: Cell<usize>,

    /// The number of elements currently in the stack.
    length: Cell<usize>,

    /// Marks this type as logically owning values of `T`.
    phantom: PhantomData<T>,
}
39
40// Public API
41impl<T> Stack<T> {
42    /// Constructs a new, empty `Stack<T>`.
43    ///
44    /// The stack will not allocate until elements are pushed onto it.
45    ///
46    /// # Examples
47    ///
48    /// ```
49    /// use bump_stack::Stack;
50    ///
51    /// let mut stack: Stack<i32> = Stack::new();
52    /// ```
53    pub const fn new() -> Self {
54        Self {
55            current_footer: Cell::new(DEAD_CHUNK.footer()),
56            first_footer: Cell::new(DEAD_CHUNK.footer()),
57            capacity: Cell::new(0),
58            length: Cell::new(0),
59            phantom: PhantomData,
60        }
61    }
62
63    /// Constructs a new, empty `Stack<T>` with at least the specified capacity.
64    ///
65    /// The stack will be able to hold at least `capacity` elements without new
66    /// allocations. This method is allowed to allocate for more elements than
67    /// `capacity`. If `capacity` is zero, the stack will not allocate.
68    ///
69    /// If it is important to know the exact allocated capacity of a `Stack`,
70    /// always use the [`capacity`] method after construction.
71    ///
72    /// For `Stack<T>` where `T` is a zero-sized type, there will be no
73    /// allocation and the capacity will always be `usize::MAX`.
74    ///
75    /// [`capacity`]: Stack::capacity
76    ///
77    /// # Examples
78    ///
79    /// ```
80    /// # use bump_stack::Stack;
81    /// let mut stk = Stack::with_capacity(10);
82    ///
83    /// // The stack contains no items, even though it has capacity for more
84    /// assert_eq!(stk.len(), 0);
85    /// assert!(stk.capacity() >= 10);
86    ///
87    /// // These are all done without additional allocations...
88    /// for i in 0..10 {
89    ///     stk.push(i);
90    /// }
91    /// assert_eq!(stk.len(), 10);
92    /// assert!(stk.capacity() >= 10);
93    ///
94    /// // ...but this may make the stack allocate a new chunk
95    /// stk.push(11);
96    /// assert_eq!(stk.len(), 11);
97    /// assert!(stk.capacity() >= 11);
98    ///
99    /// // A stack of a zero-sized type will always over-allocate, since no
100    /// // allocation is necessary
101    /// let stk_units = Stack::<()>::with_capacity(10);
102    /// assert_eq!(stk_units.capacity(), usize::MAX);
103    /// ```
104    pub fn with_capacity(capacity: usize) -> Self {
105        let stack = Self::new();
106        if const { !T::IS_ZST } && capacity != 0 {
107            let chunk_size = Chunk::<T>::chunk_size_for(capacity);
108            let footer = unsafe { stack.alloc_chunk(chunk_size) };
109            stack.current_footer.set(footer);
110            stack.first_footer.set(footer);
111        }
112        stack
113    }
114
115    /// Returns the total number of elements the stack can hold without
116    /// additional allocations.
117    ///
118    /// # Examples
119    ///
120    /// ```
121    /// # use bump_stack::Stack;
122    /// let mut stk = Stack::with_capacity(10);
123    /// stk.push(42);
124    /// assert!(stk.capacity() >= 10);
125    /// ```
126    ///
127    /// A stack with zero-sized elements will always have a capacity of
128    /// `usize::MAX`:
129    ///
130    /// ```
131    /// # use bump_stack::Stack;
132    /// #[derive(Clone)]
133    /// struct ZeroSized;
134    ///
135    /// assert_eq!(std::mem::size_of::<ZeroSized>(), 0);
136    /// let stk = Stack::<ZeroSized>::with_capacity(0);
137    /// assert_eq!(stk.capacity(), usize::MAX);
138    /// ```
139    #[inline]
140    pub const fn capacity(&self) -> usize {
141        if const { T::IS_ZST } {
142            usize::MAX
143        } else {
144            self.capacity.get()
145        }
146    }
147
    /// Returns the number of elements in the stack.
    ///
    /// # Examples
    ///
    /// ```
    /// # use bump_stack::Stack;
    /// let stk = Stack::from([1, 2, 3]);
    /// assert_eq!(stk.len(), 3);
    /// ```
    #[inline]
    pub const fn len(&self) -> usize {
        // Tracked eagerly by push/pop, so this is O(1).
        self.length.get()
    }
161
162    /// Returns `true` if the stack contains no elements.
163    ///
164    /// # Examples
165    ///
166    /// ```
167    /// # use bump_stack::Stack;
168    /// let mut s = Stack::new();
169    /// assert!(s.is_empty());
170    ///
171    /// s.push(1);
172    /// assert!(!s.is_empty());
173    /// ```
174    #[inline]
175    pub const fn is_empty(&self) -> bool {
176        self.len() == 0
177    }
178
179    /// Returns a reference to the first element of the stack, or `None` if it
180    /// is empty.
181    ///
182    /// # Examples
183    ///
184    /// ```
185    /// # use bump_stack::Stack;
186    /// let mut s = Stack::new();
187    /// assert_eq!(None, s.first());
188    ///
189    /// s.push(42);
190    /// assert_eq!(Some(&42), s.first());
191    /// ```
192    #[inline]
193    #[must_use]
194    pub fn first(&self) -> Option<&T> {
195        if !self.is_empty() {
196            unsafe {
197                let first_chunk = wrap::<T>(self.first_footer.get());
198                Some(first_chunk.first_unchecked().as_ref())
199            }
200        } else {
201            None
202        }
203    }
204
205    /// Returns a mutable reference to the first element of the slice, or `None`
206    /// if it is empty.
207    ///
208    /// # Examples
209    ///
210    /// ```
211    /// # use bump_stack::Stack;
212    /// let mut s = Stack::new();
213    /// assert_eq!(None, s.first_mut());
214    ///
215    /// s.push(1);
216    /// if let Some(first) = s.first_mut() {
217    ///     *first = 5;
218    /// }
219    /// assert_eq!(s.first(), Some(&5));
220    /// ```
221    #[inline]
222    #[must_use]
223    pub fn first_mut(&mut self) -> Option<&mut T> {
224        if !self.is_empty() {
225            unsafe {
226                let first_chunk = wrap::<T>(self.first_footer.get());
227                Some(first_chunk.first_unchecked().as_mut())
228            }
229        } else {
230            None
231        }
232    }
233
234    /// Returns the reference to last element of the stack, or `None` if it is
235    /// empty.
236    ///
237    /// # Examples
238    ///
239    /// ```
240    /// # use bump_stack::Stack;
241    /// let mut stk = Stack::new();
242    /// assert_eq!(None, stk.last());
243    ///
244    /// stk.push(1);
245    /// assert_eq!(Some(&1), stk.last());
246    /// ```
247    #[inline]
248    #[must_use]
249    pub fn last(&self) -> Option<&T> {
250        if !self.is_empty() {
251            unsafe {
252                let mut chunk = wrap::<T>(self.current_footer.get());
253                if chunk.is_empty() {
254                    chunk = wrap::<T>(chunk.prev());
255                }
256                Some(chunk.last_unchecked().as_ref())
257            }
258        } else {
259            None
260        }
261    }
262    /// Returns a mutable reference to the last item in the stack, or `None` if
263    /// it is empty.
264    ///
265    /// # Examples
266    ///
267    /// ```
268    /// # use bump_stack::Stack;
269    /// let mut stk = Stack::new();
270    /// assert_eq!(None, stk.last_mut());
271    ///
272    /// stk.push(5);
273    /// assert_eq!(Some(&mut 5), stk.last_mut());
274    ///
275    /// if let Some(last) = stk.last_mut() {
276    ///     *last = 10;
277    /// }
278    /// assert_eq!(Some(&mut 10), stk.last_mut());
279    /// ```
280    #[inline]
281    #[must_use]
282    pub fn last_mut(&mut self) -> Option<&mut T> {
283        if !self.is_empty() {
284            unsafe {
285                let mut chunk = wrap::<T>(self.current_footer.get());
286                if chunk.is_empty() {
287                    chunk = wrap::<T>(chunk.prev());
288                }
289                Some(chunk.last_unchecked().as_mut())
290            }
291        } else {
292            None
293        }
294    }
295
296    /// Appends an element to the stack returning a reference to it.
297    ///
298    /// # Panics
299    ///
300    /// Panics if the global allocator cannot allocate a new chunk of memory.
301    ///
302    /// # Examples
303    ///
304    /// ```
305    /// # use bump_stack::Stack;
306    /// let stk = Stack::new();
307    /// let new_element = stk.push(3);
308    ///
309    /// assert_eq!(new_element, &3);
310    /// assert_eq!(stk, [3]);
311    /// ```
312    ///
313    /// # Time complexity
314    ///
315    /// Takes amortized *O*(1) time. If the stack's current chunk of memory is
316    /// exhausted, it tries to use the cached one if it exists, otherwise it
317    /// tries to allocate a new chunk.
318    ///
319    /// If the new chunk of memory is too big, it tries to divide the capacity
320    /// by two and allocate it again until it reaches the minimum capacity. If
321    /// it does, it panics.
322    #[inline]
323    pub fn push(&self, value: T) -> &T {
324        self.push_with(|| value)
325    }
326
327    /// Appends an element to the stack, returning a mutable reference to it.
328    ///
329    /// # Panics
330    ///
331    /// Panics if there is no way to allocate memory for a new capacity.
332    ///
333    /// # Examples
334    ///
335    /// ```
336    /// # use bump_stack::Stack;
337    /// let mut stk = Stack::from([1, 2]);
338    /// let last = stk.push_mut(3);
339    /// assert_eq!(*last, 3);
340    /// assert_eq!(stk, [3, 2, 1]);
341    ///
342    /// let last = stk.push_mut(3);
343    /// *last += 1;
344    /// assert_eq!(stk, [4, 3, 2, 1]);
345    /// ```
346    ///
347    /// # Time complexity
348    ///
349    /// Takes amortized *O*(1) time. If the stack's current chunk of memory is
350    /// exhausted, it tries to use the cached one if it exists, otherwise it
351    /// tries to allocate a new chunk.
352    ///
353    /// If the new chunk of memory is too big, it tries to divide the capacity
354    /// by two and allocate it again until it reaches the minimum capacity. If
355    /// it does, it panics.
356    #[inline]
357    #[must_use = "if you don't need the returned value, consider using `push` instead"]
358    pub fn push_mut(&mut self, value: T) -> &mut T {
359        self.push_mut_with(|| value)
360    }
361
362    /// Pre-allocate space for an element in this stack, initializes it using
363    /// the closure, and returns a reference to the new element.
364    ///
365    /// # Examples
366    ///
367    /// ```
368    /// # use bump_stack::Stack;
369    /// let stk = Stack::new();
370    /// let new_element = stk.push_with(|| 3);
371    ///
372    /// assert_eq!(new_element, &3);
373    /// assert_eq!(stk, [3]);
374    /// ```
375    ///
376    /// # Time complexity
377    ///
378    /// Takes amortized *O*(1) time. If the stack's current chunk of memory is
379    /// exhausted, it tries to use the cached one if it exists, otherwise it
380    /// tries to allocate a new chunk.
381    ///
382    /// If the new chunk of memory is too big, it tries to divide the capacity
383    /// by two and allocate it again until it reaches the minimum capacity. If
384    /// it does, it panics.
385    #[inline(always)]
386    pub fn push_with<F>(&self, f: F) -> &T
387    where
388        F: FnOnce() -> T,
389    {
390        if const { T::IS_ZST } {
391            self.length.update(|len| len + 1);
392            unsafe { util::zst_ptr::<T>().as_ref() }
393        } else {
394            unsafe {
395                let p = self.alloc_element();
396                util::write_with(p.as_ptr(), f);
397                self.length.update(|len| len + 1);
398                p.as_ref()
399            }
400        }
401    }
402
403    /// Pre-allocate space for an element in this stack, initializes it using
404    /// the closure, and returns a mutable reference to the new element.
405    ///
406    /// # Examples
407    ///
408    /// ```
409    /// # use bump_stack::Stack;
410    /// let mut stk = Stack::from([1, 2]);
411    /// let last = stk.push_mut(3);
412    /// assert_eq!(*last, 3);
413    /// assert_eq!(stk, [3, 2, 1]);
414    ///
415    /// let last = stk.push_mut(3);
416    /// *last += 1;
417    /// assert_eq!(stk, [4, 3, 2, 1]);
418    /// ```
419    ///
420    /// # Time complexity
421    ///
422    /// Takes amortized *O*(1) time. If the stack's current chunk of memory is
423    /// exhausted, it tries to use the cached one if it exists, otherwise it
424    /// tries to allocate a new chunk.
425    ///
426    /// If the new chunk of memory is too big, it tries to divide the capacity
427    /// by two and allocate it again until it reaches the minimum capacity. If
428    /// it does, it panics.
429    #[inline(always)]
430    pub fn push_mut_with<F>(&mut self, f: F) -> &mut T
431    where
432        F: FnOnce() -> T,
433    {
434        if const { T::IS_ZST } {
435            self.length.update(|len| len + 1);
436            unsafe { util::zst_ptr::<T>().as_mut() }
437        } else {
438            unsafe {
439                let mut p = self.alloc_element();
440                util::write_with(p.as_ptr(), f);
441                self.length.update(|len| len + 1);
442                p.as_mut()
443            }
444        }
445    }
446
447    /// Removes the last element from a vector and returns it, or [`None`] if it
448    /// is empty.
449    ///
450    /// # Examples
451    ///
452    /// ```
453    /// # use bump_stack::Stack;
454    /// let mut stk = Stack::from([1, 2, 3]);
455    /// assert_eq!(stk.pop(), Some(3));
456    /// assert_eq!(stk, [2, 1]);
457    /// ```
458    #[inline]
459    pub fn pop(&mut self) -> Option<T> {
460        if const { T::IS_ZST } {
461            if self.length.get() > 0 {
462                self.length.update(|len| len - 1);
463                unsafe { Some(ptr::read(util::zst_ptr::<T>().as_ptr())) }
464            } else {
465                None
466            }
467        } else {
468            // `T` is not ZST
469            if let Some(element_ptr) = unsafe { self.dealloc_element() } {
470                self.length.update(|len| len - 1);
471                Some(unsafe { ptr::read(element_ptr.as_ptr()) })
472            } else {
473                None
474            }
475        }
476    }
477
478    /// Removes and returns the last element from a stack if the predicate
479    /// returns `true`, or [`None`] if the predicate returns false or the stack
480    /// is empty (the predicate will not be called in that case).
481    ///
482    /// # Examples
483    ///
484    /// ```
485    /// # use bump_stack::Stack;
486    /// let mut stk = Stack::from([1, 2, 3, 4]);
487    /// let pred = |x: &mut i32| *x % 2 == 0;
488    ///
489    /// assert_eq!(stk.pop_if(pred), Some(4));
490    /// assert_eq!(stk, [3, 2, 1]);
491    /// assert_eq!(stk.pop_if(pred), None);
492    /// ```
493    pub fn pop_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option<T> {
494        let last = self.last_mut()?;
495        if predicate(last) { self.pop() } else { None }
496    }
497
498    /// Clears the stack, dropping all elements.
499    ///
500    /// This method leaves the biggest chunk of memory for future allocations.
501    ///
502    /// The order of dropping elements is not defined.
503    ///
504    /// # Examples
505    ///
506    /// ```
507    /// # use bump_stack::Stack;
508    /// let mut stk = Stack::from([1, 2, 3]);
509    ///
510    /// stk.clear();
511    ///
512    /// assert!(stk.is_empty());
513    /// assert!(stk.capacity() > 0);
514    /// ```
515    pub fn clear(&mut self) {
516        let mut first_footer = self.first_footer.get();
517        let mut first_chunk = unsafe { wrap::<T>(first_footer) };
518        if first_chunk.is_dead() {
519            return;
520        }
521        loop {
522            let next_footer = first_chunk.next();
523            let next_chunk = unsafe { wrap(next_footer) };
524
525            if next_chunk.is_dead() {
526                break;
527            }
528
529            unsafe { self.dealloc_chunk(first_footer) };
530
531            first_footer = next_footer;
532            first_chunk = next_chunk;
533        }
534        let dropped = unsafe { wrap::<T>(first_footer).drop() };
535        debug_assert!(self.len() >= dropped);
536        self.length.update(|length| length - dropped);
537
538        first_chunk.set_prev(DEAD_CHUNK.footer());
539        self.first_footer.set(first_footer);
540        self.current_footer.set(first_footer);
541    }
542
543    /// Returns an iterator over the stack.
544    ///
545    /// The iterator yields all items' references in inverted order of their
546    /// insertion, corresponding to a LIFO structure behavior.
547    ///
548    /// # Examples
549    ///
550    /// ```
551    /// # use bump_stack::Stack;
552    /// let stk = Stack::from([1, 2, 4]);
553    /// let mut iterator = stk.iter();
554    ///
555    /// assert_eq!(iterator.next(), Some(&4));
556    /// assert_eq!(iterator.next(), Some(&2));
557    /// assert_eq!(iterator.next(), Some(&1));
558    /// assert_eq!(iterator.next(), None);
559    /// ```
560    ///
561    /// Since `Stack` allows to push new elements using immutable reference to
562    /// itself, you can push during iteration. But iteration is running over
563    /// elements existing at the moment of the iterator creating. It guarantees
564    /// that you won't get infinite loop.
565    ///
566    /// ```
567    /// # use bump_stack::Stack;
568    /// let stk = Stack::from([1, 2, 4]);
569    ///
570    /// for elem in stk.iter() {
571    ///     stk.push(*elem);
572    /// }
573    /// assert_eq!(stk.len(), 6);
574    /// assert_eq!(stk, [1, 2, 4, 4, 2, 1]);
575    /// ```
576    #[inline]
577    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &T> {
578        crate::iter::Iter::new(self)
579    }
580
581    /// Returns a mutable iterator over the stack.
582    ///
583    /// The iterator yields all items' references in inverted order of their
584    /// insertion, corresponding to a LIFO structure behavior.
585    ///
586    /// # Examples
587    ///
588    /// ```
589    /// # use bump_stack::Stack;
590    /// let mut stk = Stack::from([1, 2, 4]);
591    ///
592    /// for elem in stk.iter_mut() {
593    ///     *elem *= 2;
594    /// }
595    ///
596    /// assert_eq!(&stk, &[8, 4, 2]);
597    /// ```
598    #[inline]
599    pub fn iter_mut(&mut self) -> impl DoubleEndedIterator<Item = &mut T> {
600        crate::iter::IterMut::new(self)
601    }
602
    /// Returns an iterator over slices corresponding to the stack's memory
    /// chunks.
    ///
    /// The iterator yields all items' references in inverted order of their
    /// insertion, corresponding to a LIFO structure behavior.
    ///
    /// # Examples
    ///
    /// ```
    /// # use bump_stack::Stack;
    /// let stk = Stack::from([0, 1, 2]);
    /// assert_eq!(stk.slices().collect::<Vec<_>>(), [&[2, 1, 0]]);
    ///
    /// // fill up the first chunk
    /// for i in 3..stk.capacity() {
    ///     stk.push(i);
    /// }
    /// assert_eq!(stk.slices().count(), 1);
    ///
    /// // create a new chunk
    /// stk.push(42);
    /// assert_eq!(stk.slices().count(), 2);
    /// ```
    #[inline]
    pub fn slices(&self) -> impl DoubleEndedIterator<Item = &[T]> {
        // One slice per allocated chunk; see `crate::iter::SliceIter`.
        crate::iter::SliceIter::new(self)
    }
630
    /// Returns an iterator over mutable slices corresponding to the stack's
    /// memory chunks.
    ///
    /// The iterator yields all items' references in inverted order of their
    /// insertion, corresponding to a LIFO structure behavior.
    ///
    /// # Examples
    ///
    /// ```
    /// # use bump_stack::Stack;
    /// let mut stk = Stack::from([0, 1, 2]);
    ///
    /// let slice = stk.slices_mut().next().unwrap();
    /// assert_eq!(slice, [2, 1, 0]);
    /// slice[0] = 42;
    /// assert_eq!(slice, [42, 1, 0]);
    /// ```
    #[inline]
    pub fn slices_mut(&mut self) -> impl DoubleEndedIterator<Item = &mut [T]> {
        // One mutable slice per allocated chunk; see `crate::iter::SliceIterMut`.
        crate::iter::SliceIterMut::new(self)
    }
652}
653
654impl<T> core::default::Default for Stack<T> {
655    /// Creates an empty `Stack<T>`.
656    ///
657    /// The stack will not allocate until elements are pushed onto it.
658    #[inline]
659    fn default() -> Self {
660        Self::new()
661    }
662}
663
664impl<T, const N: usize> core::convert::From<&[T; N]> for Stack<T>
665where
666    T: Clone,
667{
668    /// Creates a `Stack<T>` with a chunk big enough to contain N items and
669    /// fills it by cloning `slice`'s items.
670    fn from(slice: &[T; N]) -> Self {
671        let stk = Stack::with_capacity(N);
672        for item in slice {
673            stk.push(item.clone());
674        }
675        stk
676    }
677}
678
679impl<T, const N: usize> core::convert::From<&mut [T; N]> for Stack<T>
680where
681    T: Clone,
682{
683    /// Creates a `Stack<T>` with a chunk big enough to contain N items and
684    /// fills it by cloning `slice`'s items.
685    #[inline]
686    fn from(slice: &mut [T; N]) -> Self {
687        core::convert::From::<&[T; N]>::from(slice)
688    }
689}
690
691impl<T, const N: usize> core::convert::From<[T; N]> for Stack<T>
692where
693    T: Clone,
694{
695    /// Creates a `Stack<T>` with a chunk big enough to contain N items and
696    /// fills it by cloning `array`'s items.
697    #[inline]
698    fn from(array: [T; N]) -> Self {
699        core::convert::From::<&[T; N]>::from(&array)
700    }
701}
702
703impl<T> core::convert::From<&[T]> for Stack<T>
704where
705    T: Clone,
706{
707    /// Creates a `Stack<T>` with a chunk big enough to contain N items and
708    /// fills it by cloning `slice`'s items.
709    fn from(slice: &[T]) -> Self {
710        let stk = Stack::with_capacity(slice.len());
711        for item in slice {
712            stk.push(item.clone());
713        }
714        stk
715    }
716}
717
718impl<T> core::convert::From<&mut [T]> for Stack<T>
719where
720    T: Clone,
721{
722    /// Creates a `Stack<T>` with a chunk big enough to contain N items and
723    /// fills it by cloning `slice`'s items.
724    #[inline]
725    fn from(slice: &mut [T]) -> Self {
726        core::convert::From::<&[T]>::from(slice)
727    }
728}
729
impl<T> core::ops::Drop for Stack<T> {
    #[inline]
    fn drop(&mut self) {
        // `clear` drops all elements and frees every chunk except the last
        // remaining one, which it keeps for reuse.
        self.clear();
        unsafe {
            let current_chunk = wrap::<T>(self.current_footer.get());
            // Free the retained chunk too (if any chunk was ever allocated).
            if !current_chunk.is_dead() {
                // After `clear` the retained chunk must be the only live one.
                debug_assert!(wrap::<T>(current_chunk.prev()).is_dead());
                debug_assert!(wrap::<T>(current_chunk.next()).is_dead());
                self.dealloc_chunk(self.current_footer.get());
            }
        }
    }
}
744
impl<'a, T> core::iter::IntoIterator for &'a Stack<T> {
    type Item = &'a T;
    type IntoIter = crate::iter::Iter<'a, T>;

    /// Returns a borrowing iterator yielding elements newest-first (LIFO).
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        crate::iter::Iter::new(self)
    }
}
754
impl<'a, T> core::iter::IntoIterator for &'a mut Stack<T> {
    type Item = &'a mut T;
    type IntoIter = crate::iter::IterMut<'a, T>;

    /// Returns a mutably borrowing iterator yielding elements newest-first
    /// (LIFO).
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        crate::iter::IterMut::new(self)
    }
}
764
impl<T> core::iter::IntoIterator for Stack<T> {
    type Item = T;
    type IntoIter = crate::iter::IntoIter<T>;

    /// Consumes the stack, yielding owned elements; see
    /// `crate::iter::IntoIter` for the iteration order.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        crate::iter::IntoIter::new(self)
    }
}
774
775impl<T, U> core::cmp::PartialEq<[U]> for Stack<T>
776where
777    T: core::cmp::PartialEq<U>,
778{
779    fn eq(&self, other: &[U]) -> bool {
780        self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
781    }
782}
783
784impl<T, U> core::cmp::PartialEq<&[U]> for Stack<T>
785where
786    T: core::cmp::PartialEq<U>,
787{
788    #[inline]
789    fn eq(&self, other: &&[U]) -> bool {
790        core::cmp::PartialEq::<[U]>::eq(self, other)
791    }
792}
793
794impl<T, U> core::cmp::PartialEq<&mut [U]> for Stack<T>
795where
796    T: core::cmp::PartialEq<U>,
797{
798    #[inline]
799    fn eq(&self, other: &&mut [U]) -> bool {
800        core::cmp::PartialEq::<[U]>::eq(self, other)
801    }
802}
803
804impl<T, U, const N: usize> core::cmp::PartialEq<[U; N]> for Stack<T>
805where
806    T: core::cmp::PartialEq<U>,
807{
808    fn eq(&self, other: &[U; N]) -> bool {
809        self.len() == N && self.iter().zip(other.iter()).all(|(a, b)| a == b)
810    }
811}
812
813impl<T, U, const N: usize> core::cmp::PartialEq<&[U; N]> for Stack<T>
814where
815    T: core::cmp::PartialEq<U>,
816{
817    #[inline]
818    fn eq(&self, other: &&[U; N]) -> bool {
819        core::cmp::PartialEq::<[U; N]>::eq(self, other)
820    }
821}
822
823impl<T, U, const N: usize> core::cmp::PartialEq<&mut [U; N]> for Stack<T>
824where
825    T: core::cmp::PartialEq<U>,
826{
827    #[inline]
828    fn eq(&self, other: &&mut [U; N]) -> bool {
829        core::cmp::PartialEq::<[U; N]>::eq(self, other)
830    }
831}
832
833impl<T, U, const N: usize> core::cmp::PartialEq<Stack<U>> for [T; N]
834where
835    T: core::cmp::PartialEq<U>,
836{
837    fn eq(&self, other: &Stack<U>) -> bool {
838        self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
839    }
840}
841
842impl<T, U, const N: usize> core::cmp::PartialEq<Stack<U>> for &[T; N]
843where
844    T: core::cmp::PartialEq<U>,
845{
846    fn eq(&self, other: &Stack<U>) -> bool {
847        *self == other
848    }
849}
850
851impl<T, U, const N: usize> core::cmp::PartialEq<Stack<U>> for &mut [T; N]
852where
853    T: core::cmp::PartialEq<U>,
854{
855    fn eq(&self, other: &Stack<U>) -> bool {
856        *self == other
857    }
858}
859
860impl<T> core::fmt::Debug for Stack<T>
861where
862    T: core::fmt::Debug,
863{
864    #[inline]
865    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
866        f.debug_list().entries(self.iter()).finish()
867    }
868}
869
/// Maximum typical overhead per allocation imposed by allocators.
///
/// Chunk sizes are chosen so that `size + ALLOC_OVERHEAD` is a power of two
/// (see the `debug_assert!`s in `alloc_chunk`/`alloc_element_slow`), keeping
/// the whole underlying allocation nicely sized.
const ALLOC_OVERHEAD: usize = 16;
872
873// Private API
874impl<T> Stack<T> {
875    /// Allocates memory for a new element and return a pointer to it.
876    ///
877    /// # Safety
878    ///
879    /// The caller must ensure that the method is called only for non zero-sized
880    /// types.
881    #[inline(always)]
882    unsafe fn alloc_element(&self) -> NonNull<T> {
883        debug_assert!(!T::IS_ZST);
884
885        let curr_chunk = unsafe { wrap::<T>(self.current_footer.get()) };
886        if let Some(ptr) = curr_chunk.alloc_element() {
887            ptr
888        } else {
889            unsafe { self.alloc_element_slow() }
890        }
891    }
892
    /// Slow path of `alloc_element`: the current chunk is full, so switch to
    /// (or create) a chunk with free space and allocate the element there.
    // Should be run only if the current chunk is full
    unsafe fn alloc_element_slow(&self) -> NonNull<T> {
        debug_assert!(!T::IS_ZST);
        unsafe {
            let current_chunk = wrap::<T>(self.current_footer.get());
            let next_footer = current_chunk.next();
            let next_chunk = wrap::<T>(next_footer);
            let prev_chunk = wrap::<T>(current_chunk.prev());

            debug_assert!(current_chunk.is_full());

            if current_chunk.is_dead() {
                // this is initial state without allocated chunks at all
                debug_assert!(current_chunk.is_dead());
                debug_assert!(prev_chunk.is_dead());
                debug_assert!(next_chunk.is_dead());

                let new_footer_ptr = self.alloc_chunk(Chunk::<T>::CHUNK_FIRST_SIZE);
                self.current_footer.set(new_footer_ptr);
                self.first_footer.set(new_footer_ptr);
            } else {
                // at least the current chunk is not dead
                if next_chunk.is_dead() {
                    // the current chunk is single, so create a new one, and
                    // make it the current chunk.
                    let current_chunk_size = current_chunk.size();
                    // Grow geometrically while keeping `size + ALLOC_OVERHEAD`
                    // a power of two; cap growth at the maximum chunk size.
                    let new_chunk_size = if current_chunk_size == Chunk::<T>::CHUNK_MAX_SIZE {
                        Chunk::<T>::CHUNK_MAX_SIZE
                    } else {
                        debug_assert!(current_chunk_size < Chunk::<T>::CHUNK_MAX_SIZE);
                        ((current_chunk_size + ALLOC_OVERHEAD) << 1) - ALLOC_OVERHEAD
                    };

                    debug_assert!((new_chunk_size + ALLOC_OVERHEAD).is_power_of_two());

                    let new_footer = self.alloc_chunk(new_chunk_size);
                    let new_chunk = wrap::<T>(new_footer);

                    // Link the fresh chunk right after the current one.
                    current_chunk.set_next(new_footer);
                    new_chunk.set_prev(self.current_footer.get());

                    self.current_footer.set(new_footer);
                } else {
                    // there is a next empty chunk, so make it the current chunk
                    debug_assert!(next_chunk.is_empty());
                    self.current_footer.set(next_footer);
                }
            }

            // The (possibly fresh) current chunk now has room for at least one
            // element, so this allocation cannot fail.
            let curr_chunk = wrap::<T>(self.current_footer.get());
            curr_chunk.alloc_element().unwrap_unchecked()
        }
    }
946
    /// Creates a new chunk with the given size. If it can't allocate a chunk
    /// with the given size, it tries to allocate a chunk with a two times
    /// smaller size. Otherwise, it panics.
    ///
    /// Properties `data`, `ptr`, and `layout` are initialized to the values
    /// of the newly allocated chunk.
    ///
    /// Properties `prev` and `next` point to the `DEAD_CHUNK`, so you should
    /// reinitialize them to needed values, if there exist another chunks in the
    /// list.
    unsafe fn alloc_chunk(&self, chunk_size: usize) -> NonNull<ChunkFooter> {
        debug_assert!(chunk_size <= Chunk::<T>::CHUNK_MAX_SIZE);

        let mut chunk_size = chunk_size;
        let chunk_align = Chunk::<T>::CHUNK_ALIGN;

        let (chunk_ptr, chunk_layout) = loop {
            // checks for `Layout::from_size_align_unchecked`
            debug_assert!(chunk_align != 0);
            debug_assert!(chunk_align.is_power_of_two());
            debug_assert!((chunk_size + ALLOC_OVERHEAD).is_power_of_two());
            debug_assert!(chunk_size <= isize::MAX as usize);

            let chunk_layout =
                unsafe { Layout::from_size_align_unchecked(chunk_size, chunk_align) };

            let chunk_ptr = unsafe { alloc(chunk_layout) };
            if !chunk_ptr.is_null() {
                assert!(util::ptr_is_aligned_to(chunk_ptr, Chunk::<T>::CHUNK_ALIGN));
                break (chunk_ptr, chunk_layout);
            }

            // if couldn't get a new chunk, try to shrink the chunk size by half
            // (the arithmetic keeps `size + ALLOC_OVERHEAD` a power of two)
            chunk_size = ((chunk_size + ALLOC_OVERHEAD) >> 1) - ALLOC_OVERHEAD;
            if chunk_size < Chunk::<T>::CHUNK_MIN_SIZE {
                // Give up: report the smallest layout we failed to allocate.
                handle_alloc_error(chunk_layout);
            }
        };

        // SAFETY: `chunk_ptr` was just checked to be non-null above.
        let chunk_ptr = unsafe { NonNull::new_unchecked(chunk_ptr) };
        // Writes the chunk footer into the allocation and reports how many
        // elements the chunk can hold.
        let (footer_ptr, chunk_capacity) = unsafe { Chunk::<T>::init(chunk_ptr, chunk_layout) };

        self.capacity.update(|cap| cap + chunk_capacity);

        footer_ptr
    }
993
994    #[inline(always)]
995    unsafe fn dealloc_element(&mut self) -> Option<NonNull<T>> {
996        debug_assert!(!T::IS_ZST);
997
998        unsafe {
999            let curr_chunk = wrap::<T>(self.current_footer.get());
1000            if let Some(ptr) = curr_chunk.dealloc_element() {
1001                Some(ptr)
1002            } else {
1003                self.dealloc_element_slow()
1004            }
1005        }
1006    }
1007
    /// Slow path of `dealloc_element`: the current chunk is empty, so retire
    /// it (or its cached successor) and continue popping from the previous
    /// chunk, if any.
    unsafe fn dealloc_element_slow(&mut self) -> Option<NonNull<T>> {
        unsafe {
            let current_footer = self.current_footer.get();
            let current_chunk = wrap::<T>(current_footer);

            let next_footer = current_chunk.next();
            let next_chunk = wrap::<T>(next_footer);

            let prev_footer = current_chunk.prev();
            let prev_chunk = wrap::<T>(prev_footer);

            if current_chunk.is_dead() {
                // No chunks were ever allocated: the stack is empty.
                debug_assert!(next_chunk.is_dead());
                debug_assert!(prev_chunk.is_dead());
                return None;
            }

            debug_assert!(current_chunk.is_empty());
            debug_assert!(next_chunk.is_empty());

            if !next_chunk.is_dead() {
                // Two empty chunks exist (current + cached next): keep only
                // the larger one as the cache and free the other.
                if current_chunk.size() < next_chunk.size() {
                    debug_assert!(wrap::<T>(next_chunk.next()).is_dead());

                    next_chunk.set_prev(prev_footer);
                    self.current_footer.set(next_footer);

                    self.dealloc_chunk(current_footer);
                } else {
                    self.dealloc_chunk(next_footer);
                }
                // The retained chunk is now the tail of the list.
                let current_chunk = wrap::<T>(self.current_footer.get());
                current_chunk.set_next(DEAD_CHUNK.footer());
            }

            if prev_chunk.is_dead() {
                // No earlier chunk: the stack is empty; keep links consistent.
                self.first_footer.set(self.current_footer.get());
                None
            } else {
                // check if prev_footer is full
                debug_assert!(prev_chunk.is_full());

                // Step back to the previous (full) chunk and pop from it.
                prev_chunk.set_next(self.current_footer.get());
                self.current_footer.set(prev_footer);

                let curr_chunk = wrap::<T>(self.current_footer.get());
                curr_chunk.dealloc_element()
            }
        }
    }
1058
    /// Drops all elements stored in the chunk behind `footer`, updates the
    /// stack's `length`/`capacity` bookkeeping, and returns the chunk's memory
    /// to the global allocator.
    ///
    /// # Safety
    ///
    /// `footer` must point to a live (non-dead) chunk owned by this stack, and
    /// the chunk must not be accessed afterwards.
    unsafe fn dealloc_chunk(&mut self, footer: NonNull<ChunkFooter>) {
        unsafe {
            let chunk = wrap::<T>(footer);
            // `drop` reports how many elements it dropped.
            let dropped = chunk.drop();
            debug_assert!(self.len() >= dropped);
            self.length.update(|length| length - dropped);

            let chunk_capacity = chunk.capacity();
            debug_assert!(chunk_capacity <= self.capacity());
            self.capacity.update(|cap| cap - chunk_capacity);
            debug_assert!(self.len() <= self.capacity());
            dealloc(chunk.start().cast().as_ptr(), chunk.layout());
        }
    }
1073}
1074
1075/// Unit tests.
1076#[cfg(test)]
1077mod utest;