allocated/vec/_allocated.rs

//! Dynamic array implementation using the _allocated_ pattern.
//!
//! This implementation is heavily based on
//! [The Rustonomicon][nomicon] book.
//!
//! [nomicon]: https://doc.rust-lang.org/nomicon
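//!
//! A minimal usage sketch (not compiled as a doctest; it assumes an allocator
//! such as `allocator_api2::alloc::Global`, and that the value produced by
//! `AllocatedVec::new_in(&alloc)?` can be used as the vector, just as
//! `from_iter_in` in this module does):
//!
//! ```ignore
//! let alloc = Global;
//! let mut v = AllocatedVec::new_in(&alloc)?;
//! // Safety: `alloc` is the allocator `v` was created with.
//! unsafe { v.push_in(&alloc, 1)? };
//! assert_eq!(v.pop(), Some(1));
//! // Safety: same allocator; with the _allocated_ pattern the caller frees the buffer.
//! unsafe { v.drop_in(&alloc) };
//! ```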

use core::fmt;
use core::marker::PhantomData;
use core::mem;
use core::ops::{Deref, DerefMut};

use allocator_api2::alloc::Allocator;

use crate::_drop_guard::DropGuardResult;
use crate::_error::AllocResult;
use crate::_traits::{DropIn, FromIteratorIn};

use super::_raw_vec::{IntoIter, RawValIter, RawVec};

/// An allocated variant of a dynamic array.
pub struct AllocatedVec<T> {
    buf: RawVec<T>,
    len: usize,
}

impl<T: fmt::Debug> fmt::Debug for AllocatedVec<T> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_list().entries(self.iter()).finish()
    }
}

impl<T: PartialEq> PartialEq for AllocatedVec<T> {
    fn eq(&self, other: &Self) -> bool {
        if self.len != other.len {
            return false;
        }

        self.iter().zip(other.iter()).all(|(x, y)| x == y)
    }
}

impl<T: Eq> Eq for AllocatedVec<T> {}

impl<T> AllocatedVec<T> {
    fn ptr(&self) -> *mut T {
        self.buf.ptr()
    }

    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }

    pub fn new_in<A: Allocator>(alloc: &A) -> DropGuardResult<Self, &A> {
        Ok(RawVec::new_in(alloc)?.map(|buf| AllocatedVec { buf, len: 0 }))
    }

    pub fn try_with_capacity_in<A: Allocator>(
        alloc: &A,
        capacity: usize,
    ) -> DropGuardResult<Self, &A> {
        Ok(RawVec::try_with_capacity_in(alloc, capacity)?.map(|buf| AllocatedVec { buf, len: 0 }))
    }

    /// Shrink capacity to match `len`.
    ///
    /// # Safety
    ///
    /// `alloc` MUST be the allocator used to allocate this `AllocatedVec`
    pub unsafe fn shrink_to_fit_in<A: Allocator>(&mut self, alloc: &A) -> AllocResult<()> {
        self.buf.shrink_in(alloc, self.len)
    }

    /// Insert element `elem` at the end of the vector. If memory needs to be
    /// allocated, `alloc` will be used.
    ///
    /// # Safety
    ///
    /// `alloc` MUST be the allocator used to allocate this `AllocatedVec`
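    ///
    /// A minimal usage sketch (not compiled as a doctest; `v` is assumed to be
    /// an `AllocatedVec<i32>` created with `alloc`):
    ///
    /// ```ignore
    /// // Safety: `alloc` is the allocator `v` was created with.
    /// unsafe { v.push_in(&alloc, 42)? };
    /// assert_eq!(v.last(), Some(&42));
    /// ```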
    pub unsafe fn push_in<A: Allocator>(&mut self, alloc: &A, elem: T) -> AllocResult<()> {
        if self.len == self.capacity() {
            self.buf.grow_in(alloc)?;
        }

        // Safety: We just ensured there's space for at least one more element,
        // so the pointer stays within the allocated buffer
        let ptr = unsafe { self.ptr().add(self.len) };
        // Safety: `ptr` points to uninitialized, allocated memory
        unsafe { core::ptr::write(ptr, elem) };

        // Can't overflow, we'll OOM first.
        self.len += 1;

        Ok(())
    }

    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            self.len -= 1;
            // Safety: self.len < self.capacity
            let ptr = unsafe { self.ptr().add(self.len) };
            // Safety: ptr is an aligned pointer to allocated memory
            unsafe { Some(core::ptr::read(ptr)) }
        }
    }

    /// Insert element `elem` at `index`, shifting every element after it one
    /// position to the right. If memory needs to be allocated, `alloc` will
    /// be used.
    ///
    /// # Safety
    ///
    /// `alloc` MUST be the allocator used to allocate this `AllocatedVec`
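    ///
    /// A minimal usage sketch (not compiled as a doctest; `v` is assumed to
    /// already hold `[1, 3]` and to have been created with `alloc`):
    ///
    /// ```ignore
    /// // Safety: `alloc` is the allocator `v` was created with.
    /// unsafe { v.insert_in(&alloc, 1, 2)? };
    /// assert_eq!(&v[..], &[1, 2, 3]);
    /// ```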
    pub unsafe fn insert_in<A: Allocator>(
        &mut self,
        alloc: &A,
        index: usize,
        elem: T,
    ) -> AllocResult<()> {
        assert!(index <= self.len, "index out of bounds");
        if self.capacity() == self.len {
            self.buf.grow_in(alloc)?;
        }

        // Safety: index <= self.len < self.capacity
        let ptr1 = unsafe { self.ptr().add(index) };
        // Safety: index <= self.len < self.capacity
        let ptr2 = unsafe { self.ptr().add(index + 1) };

        // Safety: All writes will occur within our buffer.
        unsafe { core::ptr::copy(ptr1, ptr2, self.len - index) };
        // Safety: Write occurs within buffer.
        unsafe { core::ptr::write(ptr1, elem) };

        self.len += 1;

        Ok(())
    }

    /// Reserves capacity for at least `additional` more elements to be inserted
    /// into the given `AllocatedVec<T>`. The collection may reserve more space.
    /// After calling `try_reserve_in`, the capacity will be greater than or
    /// equal to `self.len() + additional`. Guaranteed to do nothing if capacity
    /// is already sufficient.
    ///
    /// # Safety
    ///
    /// `alloc` MUST be the allocator used to allocate this `AllocatedVec`
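    ///
    /// A minimal usage sketch (not compiled as a doctest; `v` is assumed to
    /// have been created with `alloc`):
    ///
    /// ```ignore
    /// // Safety: `alloc` is the allocator `v` was created with.
    /// unsafe { v.try_reserve_in(&alloc, 10)? };
    /// assert!(v.capacity() >= v.len() + 10);
    /// ```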
    pub unsafe fn try_reserve_in<A: Allocator>(
        &mut self,
        alloc: &A,
        additional: usize,
    ) -> AllocResult<()> {
        self.buf.try_reserve_in(alloc, self.len, additional)
    }

    /// Extend the `AllocatedVec` with the contents of the iterator `elems`.
    ///
    /// # Safety
    ///
    /// `alloc` MUST be the allocator used to allocate this `AllocatedVec`
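    ///
    /// A minimal usage sketch (not compiled as a doctest; `v` is assumed to be
    /// an empty `AllocatedVec<u32>` created with `alloc`):
    ///
    /// ```ignore
    /// // Safety: `alloc` is the allocator `v` was created with.
    /// unsafe { v.extend_in(&alloc, 0..3)? };
    /// assert_eq!(&v[..], &[0, 1, 2]);
    /// ```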
    pub unsafe fn extend_in<A: Allocator, K: IntoIterator<Item = T>>(
        &mut self,
        alloc: &A,
        elems: K,
    ) -> AllocResult<()> {
        let elems = elems.into_iter();
        let (_, upper_bound) = elems.size_hint();
        if let Some(upper_bound) = upper_bound {
            self.try_reserve_in(alloc, upper_bound)?;
        }

        for e in elems {
            self.push_in(alloc, e)?;
        }

        Ok(())
    }

    pub fn truncate(&mut self, len: usize) {
        if len > self.len {
            return;
        }
        let remaining_len = self.len - len;
        // Safety: len <= self.len; ptr points to a section of our allocated buffer
        let ptr = unsafe { self.as_mut_ptr().add(len) };
        let s = core::ptr::slice_from_raw_parts_mut(ptr, remaining_len);
        self.len = len;
        // Safety: s is a valid slice of initialized instances of `T`
        unsafe { core::ptr::drop_in_place(s) };
    }

    pub fn clear(&mut self) {
        while self.pop().is_some() {}
    }

    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len, "index out of bounds");
        self.len -= 1;

        // Safety: ptr1 will be a valid pointer within our buffer
        let ptr1 = unsafe { self.ptr().add(index) };
        // Safety: ptr2 will be a valid pointer within our buffer
        let ptr2 = unsafe { self.ptr().add(index + 1) };

        // Safety: ptr1 is a valid pointer within our buffer
        let result = unsafe { core::ptr::read(ptr1) };

        // Safety: All writes will occur within our buffer.
        unsafe { core::ptr::copy(ptr2, ptr1, self.len - index) };

        result
    }

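    /// Removes all elements and returns an iterator that yields them by value.
    /// The vector is left empty, but its allocation is kept. Elements that are
    /// not consumed are dropped when the `Drain` is dropped.
    ///
    /// A minimal usage sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// for elem in v.drain() {
    ///     // each `elem` is moved out of the vector
    /// }
    /// assert!(v.is_empty());
    /// ```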
    pub fn drain(&mut self) -> Drain<'_, T> {
        let iter = RawValIter::new(self);

        // This is a mem::forget safety measure: if the `Drain` is forgotten,
        // we just leak the whole AllocatedVec's contents rather than risk a
        // double drop. We need to zero `len` eventually anyway, so do it now.
        self.len = 0;

        Drain {
            iter,
            vec: PhantomData,
        }
    }
}

impl<T> DropIn for AllocatedVec<T> {
    #[inline]
    unsafe fn drop_in<A: Allocator>(&mut self, alloc: &A) {
        while self.pop().is_some() {}
        self.buf.drop_in(alloc)
    }
}

impl<T> Deref for AllocatedVec<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        // Safety: len <= capacity
        unsafe { core::slice::from_raw_parts(self.ptr(), self.len) }
    }
}

impl<T> DerefMut for AllocatedVec<T> {
    fn deref_mut(&mut self) -> &mut [T] {
        // Safety: len <= capacity
        unsafe { core::slice::from_raw_parts_mut(self.ptr(), self.len) }
    }
}

impl<T> IntoIterator for AllocatedVec<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;
    fn into_iter(self) -> IntoIter<T> {
        let iter = RawValIter::new(&self);
        // Safety: `self.buf` is a valid `RawVec`; `self` is forgotten below,
        // so ownership of the buffer is moved out exactly once
        let buf = unsafe { core::ptr::read(&self.buf) };
        mem::forget(self);

        IntoIter::from_parts(iter, buf)
    }
}

pub struct Drain<'a, T: 'a> {
    vec: PhantomData<&'a mut AllocatedVec<T>>,
    iter: RawValIter<T>,
}

impl<'a, T> Iterator for Drain<'a, T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        self.iter.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back()
    }
}

impl<'a, T> Drop for Drain<'a, T> {
    fn drop(&mut self) {
        // Consume any remaining elements so their destructors run
        for _ in &mut *self {}
    }
}

impl<'a, T, A: Allocator> FromIteratorIn<'a, T, A> for AllocatedVec<T> {
    #[inline]
    fn from_iter_in<I: IntoIterator<Item = T>>(
        alloc: &'a A,
        iter: I,
    ) -> DropGuardResult<AllocatedVec<T>, &'a A> {
        let mut v = AllocatedVec::new_in(alloc)?;

        // Safety: `alloc` is the allocator used to allocate `v`
        unsafe {
            v.extend_in(alloc, iter)?;
        }

        Ok(v)
    }
}