//! cao_lang/collections/bounded_stack.rs

1use std::{
2    mem::MaybeUninit,
3    ptr::{self, drop_in_place},
4};
5use thiserror::Error;
6
/// A fixed-capacity LIFO stack backed by a single boxed slice.
///
/// Invariant: slots `[0, head)` of `storage` are initialized; slots
/// `[head, capacity)` are uninitialized. `head` is therefore both the
/// number of live elements and the index of the next free slot.
pub struct BoundedStack<T> {
    // Number of initialized elements; also the next write position.
    head: usize,
    // Maximum number of elements; equals `storage.len()`.
    capacity: usize,
    // Backing storage; only the first `head` slots hold live values.
    storage: Box<[MaybeUninit<T>]>,
}
12
13#[derive(Clone, Debug, Error)]
14pub enum StackError {
15    #[error("Stack is full")]
16    Full,
17}
18
19impl<T> BoundedStack<T> {
20    pub fn new(capacity: usize) -> Self {
21        let mut storage = Vec::new();
22        storage.resize_with(capacity, MaybeUninit::uninit);
23        let storage = storage.into_boxed_slice();
24        Self {
25            head: 0,
26            capacity,
27            storage,
28        }
29    }
30
31    #[inline]
32    pub fn is_empty(&self) -> bool {
33        self.len() == 0
34    }
35
36    #[inline]
37    pub fn len(&self) -> usize {
38        self.head
39    }
40
41    #[inline]
42    pub fn capacity(&self) -> usize {
43        self.capacity
44    }
45
46    pub fn iter(&self) -> impl Iterator<Item = &T> {
47        let ptr: *const MaybeUninit<T> = self.storage.as_ptr();
48        (0..self.head).map(move |i| unsafe { &*(*ptr.add(i)).as_ptr() })
49    }
50
51    pub fn iter_backwards(&self) -> impl Iterator<Item = &T> {
52        let ptr: *const MaybeUninit<T> = self.storage.as_ptr();
53        (0..self.head).map(move |i| unsafe { &*(*ptr.add(self.head - 1 - i)).as_ptr() })
54    }
55
56    pub fn push(&mut self, val: T) -> Result<(), StackError> {
57        if self.head >= self.capacity {
58            return Err(StackError::Full);
59        }
60        unsafe {
61            ptr::write(self.storage.get_unchecked_mut(self.head).as_mut_ptr(), val);
62        }
63        self.head += 1;
64        Ok(())
65    }
66
67    pub fn pop(&mut self) -> Option<T> {
68        (self.head > 0).then(|| {
69            self.head -= 1;
70            unsafe { ptr::read(self.storage.get_unchecked(self.head).as_ptr()) }
71        })
72    }
73
74    pub fn last(&self) -> Option<&T> {
75        (self.head > 0).then(|| unsafe { &*self.storage.get_unchecked(self.head - 1).as_ptr() })
76    }
77
78    pub fn last_mut(&mut self) -> Option<&mut T> {
79        (self.head > 0)
80            .then(|| unsafe { &mut *self.storage.get_unchecked_mut(self.head - 1).as_mut_ptr() })
81    }
82
83    pub fn clear(&mut self) {
84        if std::mem::needs_drop::<T>() {
85            for i in 0..self.head {
86                unsafe { drop_in_place(self.storage.get_unchecked_mut(i).as_mut_ptr()) }
87            }
88        }
89        self.head = 0;
90    }
91}
92
// Dropping the stack must drop every element still on it; `clear` already
// walks the initialized prefix and runs each destructor exactly once.
impl<T> Drop for BoundedStack<T> {
    fn drop(&mut self) {
        self.clear();
    }
}
98
#[cfg(test)]
mod tests {
    use super::*;

    /// Increments the pointed-to counter on drop so tests can observe
    /// exactly how many destructors ran.
    struct Foo(*mut u32);
    impl Drop for Foo {
        fn drop(&mut self) {
            unsafe {
                *self.0 += 1;
            }
        }
    }

    #[test]
    fn test_drops_on_clear() {
        // Box the counter so the raw pointers stay valid at a stable address.
        let mut drops = Box::pin(0u32);

        let mut stack = BoundedStack::new(5);
        for _ in 0..5 {
            stack.push(Foo(drops.as_mut().get_mut() as *mut _)).unwrap();
        }

        assert_eq!(*drops, 0);
        assert_eq!(stack.len(), 5);

        stack.clear();

        assert_eq!(*drops, 5);
        assert_eq!(stack.len(), 0);
    }

    #[test]
    fn test_drops_on_drop() {
        let mut drops = Box::pin(0u32);

        let mut stack = BoundedStack::new(3);
        for _ in 0..3 {
            stack.push(Foo(drops.as_mut().get_mut() as *mut _)).unwrap();
        }

        drop(stack);
        // The Drop impl must release every remaining element exactly once.
        assert_eq!(*drops, 3);
    }

    #[test]
    fn test_push_pop_lifo() {
        let mut stack = BoundedStack::new(3);
        for i in 0..3 {
            stack.push(i).unwrap();
        }
        // Pushing past capacity fails and leaves the stack intact.
        assert!(stack.push(99).is_err());
        assert_eq!(stack.len(), 3);

        // Elements come back in reverse insertion order.
        assert_eq!(stack.pop(), Some(2));
        assert_eq!(stack.pop(), Some(1));
        assert_eq!(stack.pop(), Some(0));
        assert_eq!(stack.pop(), None);
        assert!(stack.is_empty());
    }
}
129}