// stack_dst/stack.rs

1use core::{iter, marker, mem, ops, ptr};
2
3mod impls;
4
5// Implementation Notes
6// -----
7//
8// The data array is filled from the back, with the metadata stored before (at a lower memory address)
9// the actual data. This so the code can use a single integer to track the position (using size_of_val
10// when popping items, and the known size when pushing).
11
12/// A fixed-capacity stack that can contain dynamically-sized types
13///
14/// Uses an array of usize as a backing store for a First-In, Last-Out stack
15/// of items that can unsize to `T`.
16///
17/// Note: Each item in the stack takes at least one slot in the buffer (to store the metadata)
/// A fixed-capacity stack that can contain dynamically-sized types
///
/// Uses an array of usize as a backing store for a First-In, Last-Out stack
/// of items that can unsize to `T`.
///
/// Note: Each item in the stack takes at least one slot in the buffer (to store the metadata)
pub struct Stack<T: ?Sized, D: ::DataBuf> {
    // Marker for the stored item type. `*const T` (rather than `T`) avoids
    // requiring `T: Sized` and makes the struct `!Send`/`!Sync` by default.
    _pd: marker::PhantomData<*const T>,
    // Offset from the _back_ of `data` to the next free position.
    // I.e. data[data.len() - cur_ofs] is the first metadata word
    next_ofs: usize,
    // Backing word buffer; items are stored from the back (metadata first,
    // then the value data — see the implementation notes at the top of the file)
    data: D,
}
25
26impl<T: ?Sized, D: ::DataBuf> ops::Drop for Stack<T, D> {
27    fn drop(&mut self) {
28        while !self.is_empty() {
29            self.pop();
30        }
31    }
32}
33impl<T: ?Sized, D: ::DataBuf + Default> Default for Stack<T, D> {
34    fn default() -> Self {
35        Stack::new()
36    }
37}
38
impl<T: ?Sized, D: ::DataBuf> Stack<T, D> {
    /// Construct a new (empty) stack
    pub fn new() -> Self
    where
        D: Default,
    {
        Self::with_buffer(D::default())
    }
    /// Construct a new (empty) stack using the provided buffer
    pub fn with_buffer(data: D) -> Self {
        Stack {
            _pd: marker::PhantomData,
            next_ofs: 0,
            data,
        }
    }

    /// Tests if the stack is empty
    pub fn is_empty(&self) -> bool {
        self.next_ofs == 0
    }

    /// Number of buffer words needed to store one item's pointer metadata
    /// (the fat-pointer size minus the data-pointer word, rounded up to words).
    /// For `T = [U]` this is the slice length; for `T = dyn Trait` the vtable pointer.
    fn meta_words() -> usize {
        D::round_to_words(mem::size_of::<&T>() - mem::size_of::<usize>())
    }

    /// Push a value at the top of the stack
    ///
    /// Returns `Err(v)` (giving the value back) if there is insufficient space.
    ///
    /// ```
    /// # use stack_dst::Stack;
    /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new();
    /// stack.push([1, 2, 3]);
    /// ```
    #[cfg(feature = "unsize")]
    pub fn push<U: marker::Unsize<T>>(&mut self, v: U) -> Result<(), U>
    where
        (U, D::Inner): crate::AlignmentValid,
    {
        // The identity closure is enough here: `Unsize` performs the coercion.
        self.push_stable(v, |p| p)
    }

    /// Push a value at the top of the stack (without using `Unsize`)
    ///
    /// `f` must return its argument unsized to `T`; it exists so this works on
    /// stable Rust, where the `Unsize` trait is unavailable.
    ///
    /// ```
    /// # use stack_dst::Stack;
    /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new();
    /// stack.push_stable([1, 2, 3], |v| v);
    /// ```
    pub fn push_stable<U, F: FnOnce(&U) -> &T>(&mut self, v: U, f: F) -> Result<(), U>
    where
        (U, D::Inner): crate::AlignmentValid,
    {
        // Compile-time/debug check that `U`'s alignment fits the buffer's word type
        <(U, D::Inner) as crate::AlignmentValid>::check();

        // SAFE: Destination address is valid
        unsafe {
            // `check_fat_pointer` validates that `f` returned a pointer to `v` itself
            match self.push_inner(crate::check_fat_pointer(&v, f)) {
                Ok(pii) => {
                    // Move `v` into the reserved slot; the metadata was already written
                    ptr::write(pii.data.as_mut_ptr() as *mut U, v);
                    Ok(())
                }
                // Out of space: hand the value back to the caller
                Err(_) => Err(v),
            }
        }
    }

    /// Construct a fat pointer to the item whose metadata begins `ofs` words
    /// from the END of the buffer.
    ///
    /// Caller must ensure `ofs` refers to a valid, initialised item.
    unsafe fn raw_at(&self, ofs: usize) -> *mut T {
        let dar = self.data.as_ref();
        // Offsets are back-relative (see implementation notes)
        let meta = &dar[dar.len() - ofs..];
        let mw = Self::meta_words();
        // Layout within an item: [metadata words][value data]
        let (meta, data) = meta.split_at(mw);
        super::make_fat_ptr(data.as_ptr() as *mut (), meta)
    }
    /// As `raw_at`, but via a unique borrow of the buffer.
    unsafe fn raw_at_mut(&mut self, ofs: usize) -> *mut T {
        let dar = self.data.as_mut();
        let ofs = dar.len() - ofs;
        let meta = &mut dar[ofs..];
        let mw = Self::meta_words();
        let (meta, data) = meta.split_at_mut(mw);
        super::make_fat_ptr(data.as_mut_ptr() as *mut (), meta)
    }
    // Get a raw pointer to the top of the stack
    fn top_raw(&self) -> Option<*mut T> {
        if self.next_ofs == 0 {
            None
        } else {
            // SAFE: Internal consistency maintains the metadata validity
            Some(unsafe { self.raw_at(self.next_ofs) })
        }
    }
    // Get a raw pointer to the top of the stack
    fn top_raw_mut(&mut self) -> Option<*mut T> {
        if self.next_ofs == 0 {
            None
        } else {
            // SAFE: Internal consistency maintains the metadata validity
            Some(unsafe { self.raw_at_mut(self.next_ofs) })
        }
    }
    /// Returns a pointer to the top item on the stack
    pub fn top(&self) -> Option<&T> {
        self.top_raw().map(|x| unsafe { &*x })
    }
    /// Returns a pointer to the top item on the stack (unique/mutable)
    pub fn top_mut(&mut self) -> Option<&mut T> {
        self.top_raw_mut().map(|x| unsafe { &mut *x })
    }
    /// Pop the top item off the stack
    ///
    /// The item is dropped in place; no-op on an empty stack.
    pub fn pop(&mut self) {
        if let Some(ptr) = self.top_raw_mut() {
            assert!(self.next_ofs > 0);
            // SAFE: Pointer is valid, and will never be accessed after this point
            let words = unsafe {
                // Read the size BEFORE dropping (size_of_val needs valid metadata)
                let size = mem::size_of_val(&*ptr);
                ptr::drop_in_place(ptr);
                D::round_to_words(size)
            };
            // Rewind the cursor past the value data and its metadata
            self.next_ofs -= words + Self::meta_words();
        }
    }

    /// Obtain an immutable iterator (yields references to items, in the order they would be popped)
    /// ```
    /// let mut list = ::stack_dst::Stack::<str, ::stack_dst::buffers::Ptr8>::new();
    /// list.push_str("Hello");
    /// list.push_str("world");
    /// let mut it = list.iter();
    /// assert_eq!(it.next(), Some("world"));
    /// assert_eq!(it.next(), Some("Hello"));
    /// assert_eq!(it.next(), None);
    /// ```
    pub fn iter(&self) -> Iter<T, D> {
        Iter(self, self.next_ofs)
    }
    /// Obtain unique/mutable iterator
    /// ```
    /// let mut list = ::stack_dst::Stack::<[u8], ::stack_dst::buffers::Ptr8>::new();
    /// list.push_copied(&[1,2,3]);
    /// list.push_copied(&[9]);
    /// for v in list.iter_mut() {
    ///     v[0] -= 1;
    /// }
    /// let mut it = list.iter();
    /// assert_eq!(it.next(), Some(&[8][..]));
    /// assert_eq!(it.next(), Some(&[0,2,3][..]));
    /// assert_eq!(it.next(), None);
    /// ```
    pub fn iter_mut(&mut self) -> IterMut<T, D> {
        IterMut(self, self.next_ofs)
    }
}
190
/// Handle returned by `push_inner`/`push_inner_raw`, describing where the
/// freshly-reserved item should be written, plus how to undo the reservation.
struct PushInnerInfo<'a, DInner> {
    /// Buffer for value data
    data: &'a mut crate::BufSlice<DInner>,
    /// Buffer for metadata (length/vtable)
    meta: &'a mut crate::BufSlice<DInner>,
    /// Memory location for resetting the push
    reset_slot: &'a mut usize,
    /// Value to write to `reset_slot` to roll the push back
    reset_value: usize,
}
impl<T: ?Sized, D: ::DataBuf> Stack<T, D> {

    /// See `push_inner_raw`
    ///
    /// Derives the byte size and metadata words from an example fat pointer.
    unsafe fn push_inner(&mut self, fat_ptr: &T) -> Result<PushInnerInfo<D::Inner>, ()> {
        let bytes = mem::size_of_val(fat_ptr);
        // Split the fat pointer into data pointer + metadata words
        let (_data_ptr, len, v) = crate::decompose_pointer(fat_ptr);
        self.push_inner_raw(bytes, &v[..len])
    }

    /// Reserve space at the top of the stack for an item of `bytes` bytes,
    /// writing `metadata` into the metadata slot immediately.
    ///
    /// Returns:
    /// - metadata slot
    /// - data slot
    /// - Total words used
    unsafe fn push_inner_raw(&mut self, bytes: usize, metadata: &[usize]) -> Result<PushInnerInfo<D::Inner>, ()> {
        // The caller must supply metadata of exactly the size this `T` requires
        assert!( D::round_to_words(mem::size_of_val(metadata)) == Self::meta_words() );
        let words = D::round_to_words(bytes) + Self::meta_words();

        let req_space = self.next_ofs + words;
        // Attempt resize (if the underlying buffer allows it)
        if req_space > self.data.as_ref().len() {
            let old_len = self.data.as_ref().len();
            if let Ok(_) = self.data.extend(req_space) {
                let new_len = self.data.as_ref().len();
                // Existing items live at the BACK of the buffer, so shift them
                // into the newly grown tail to keep back-relative offsets valid
                self.data.as_mut().rotate_right(new_len - old_len);
            }
        }

        // Check if there is sufficient space for the new item
        if req_space <= self.data.as_ref().len() {
            // Get the base pointer for the new item
            let prev_next_ofs = self.next_ofs;
            self.next_ofs += words;
            let len = self.data.as_ref().len();
            // The item's slot: [meta_words metadata][rounded value data]
            let slot = &mut self.data.as_mut()[len - self.next_ofs..][..words];
            let (meta, rv) = slot.split_at_mut(Self::meta_words());

            // Populate the metadata
            super::store_metadata(meta, metadata);

            // Increment offset and return
            Ok(PushInnerInfo {
                meta,
                data: rv,
                // Restoring `next_ofs` to its previous value undoes this push
                reset_slot: &mut self.next_ofs,
                reset_value: prev_next_ofs
                })
        } else {
            Err(())
        }
    }
}
251
252impl<D: ::DataBuf> Stack<str, D> {
253    /// Push the contents of a string slice as an item onto the stack
254    ///
255    /// ```
256    /// # use stack_dst::Stack;
257    /// let mut stack = Stack::<str, ::stack_dst::buffers::U8_32>::new();
258    /// stack.push_str("Hello!");
259    /// ```
260    pub fn push_str(&mut self, v: &str) -> Result<(), ()> {
261        unsafe {
262            self.push_inner(v)
263                .map(|pii| ptr::copy(v.as_bytes().as_ptr(), pii.data.as_mut_ptr() as *mut u8, v.len()))
264        }
265    }
266}
267impl<D: ::DataBuf, T: Clone> Stack<[T], D>
268where
269    (T, D::Inner): crate::AlignmentValid,
270{
271    /// Pushes a set of items (cloning out of the input slice)
272    ///
273    /// ```
274    /// # use stack_dst::Stack;
275    /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new();
276    /// stack.push_cloned(&[1, 2, 3]);
277    /// ```
278    pub fn push_cloned(&mut self, v: &[T]) -> Result<(), ()> {
279        <(T, D::Inner) as crate::AlignmentValid>::check();
280        self.push_from_iter(v.iter().cloned())
281    }
282    /// Pushes a set of items (copying out of the input slice)
283    ///
284    /// ```
285    /// # use stack_dst::Stack;
286    /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new();
287    /// stack.push_copied(&[1, 2, 3]);
288    /// ```
289    pub fn push_copied(&mut self, v: &[T]) -> Result<(), ()>
290    where
291        T: Copy,
292    {
293        <(T, D::Inner) as crate::AlignmentValid>::check();
294        // SAFE: Carefully constructed to maintain consistency
295        unsafe {
296            self.push_inner(v).map(|pii| {
297                ptr::copy(
298                    v.as_ptr() as *const u8,
299                    pii.data.as_mut_ptr() as *mut u8,
300                    mem::size_of_val(v),
301                )
302            })
303        }
304    }
305}
impl<D: crate::DataBuf, T> Stack<[T], D>
where
    (T, D::Inner): crate::AlignmentValid,
{
    /// Push an item, populated from an exact-sized iterator
    ///
    /// ```
    /// # extern crate core;
    /// # use stack_dst::Stack;
    /// # use core::fmt::Display;
    /// let mut stack = Stack::<[u8], stack_dst::buffers::Ptr8>::new();
    /// stack.push_from_iter(0..10);
    /// assert_eq!(stack.top().unwrap(), &[0,1,2,3,4,5,6,7,8,9]);
    /// ```
    pub fn push_from_iter(&mut self, mut iter: impl ExactSizeIterator<Item=T>) -> Result<(),()> {
        <(T, D::Inner) as crate::AlignmentValid>::check();
        // SAFE: API used correctly
        unsafe {
            // `&[0]` is a placeholder for the slice-length metadata word;
            // `list_push_gen` writes each element in turn and is handed the
            // reset slot/value so the reservation can be rolled back if the
            // iterator panics partway (see `PushInnerInfo`).
            let pii = self.push_inner_raw(iter.len() * mem::size_of::<T>(), &[0])?;
            crate::list_push_gen(pii.meta, pii.data, iter.len(), |_| iter.next().unwrap(), pii.reset_slot, pii.reset_value);
            Ok( () )
        }
    }
}
330
/// DST Stack iterator (immutable)
///
/// Yields items in the order they would be popped (most recent push first).
/// The `usize` is the back-relative offset of the next item to yield.
pub struct Iter<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf>(&'a Stack<T, D>, usize);
333impl<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf> iter::Iterator for Iter<'a, T, D> {
334    type Item = &'a T;
335    fn next(&mut self) -> Option<&'a T> {
336        if self.1 == 0 {
337            None
338        } else {
339            // SAFE: Bounds checked, aliasing enforced by API
340            let rv = unsafe { &*self.0.raw_at(self.1) };
341            self.1 -= Stack::<T, D>::meta_words() + D::round_to_words(mem::size_of_val(rv));
342            Some(rv)
343        }
344    }
345}
346
/// DST Stack iterator (unique/mutable)
///
/// Yields items in the order they would be popped (most recent push first).
/// The `usize` is the back-relative offset of the next item to yield.
pub struct IterMut<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf>(&'a mut Stack<T, D>, usize);
349impl<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf> iter::Iterator for IterMut<'a, T, D> {
350    type Item = &'a mut T;
351    fn next(&mut self) -> Option<&'a mut T> {
352        if self.1 == 0 {
353            None
354        } else {
355            // SAFE: Bounds checked, aliasing enforced by API
356            let rv = unsafe { &mut *self.0.raw_at_mut(self.1) };
357            self.1 -= Stack::<T, D>::meta_words() + D::round_to_words(mem::size_of_val(rv));
358            Some(rv)
359        }
360    }
361}