cranelift_egraph/bumpvec.rs

//! Vectors allocated in arenas, with small per-vector overhead.

use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::ops::Range;

/// A vector of `T` stored within a `BumpArena`.
///
/// This is something like a normal `Vec`, except that all accesses
/// and updates require a separate borrow of the `BumpArena`. This, in
/// turn, makes the Vec itself very compact: only three `u32`s (12
/// bytes). The `BumpSlice` variant is only two `u32`s (8 bytes) and
/// is sufficient to reconstruct a slice, but not grow the vector.
///
/// The `BumpVec` does *not* implement `Clone` or `Copy`; it
/// represents unique ownership of a range of indices in the arena. If
/// dropped, those indices will be unavailable until the arena is
/// freed. This is "fine" (it is normally how arena allocation
/// works). To explicitly free a vector's indices and make them
/// available for future allocations, a very rudimentary reuse
/// mechanism exists via `BumpVec::free(arena)`. (The allocation path
/// opportunistically checks the most recently freed range on the
/// freelist, and can carve off a piece of it if it is larger than
/// needed, but it does not attempt to traverse the entire freelist;
/// this is a compromise between bump-allocation speed and memory
/// efficiency, which also influences speed through cached-memory
/// reuse.)
///
/// The type `T` should not have a `Drop` implementation. This
/// typically means that it does not own any boxed memory,
/// sub-collections, or other resources. This is important for the
/// efficiency of the data structure (otherwise, to call `Drop` impls,
/// the arena would need to track which indices are live or dead; the
/// `BumpVec` itself cannot do the drop because it does not retain a
/// reference to the arena). Note that placing a `T` with a `Drop`
/// impl in the arena is still *safe*, because leaking (that is, never
/// calling `Drop::drop()`) is safe. It is merely less efficient, and
/// so should be avoided if possible.
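///
/// # Example
///
/// An illustrative sketch of basic usage (the block is marked
/// `ignore` so it is not run as a doctest):
///
/// ```ignore
/// let mut arena: BumpArena<u32> = BumpArena::new();
/// let mut v = arena.vec_with_capacity(2);
/// v.push(1, &mut arena);
/// v.push(2, &mut arena);
/// assert_eq!(v.as_slice(&arena), &[1, 2]);
/// // Return the indices to the arena's freelist for reuse.
/// v.free(&mut arena);
/// ```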
#[derive(Debug)]
pub struct BumpVec<T> {
    base: u32,
    len: u32,
    cap: u32,
    _phantom: PhantomData<T>,
}

/// A slice in an arena: like a `BumpVec`, but has a fixed size that
/// cannot grow. The size of this struct is one 32-bit word smaller
/// than `BumpVec`. It is copyable/cloneable because it will never be
/// freed.
#[derive(Debug, Clone, Copy)]
pub struct BumpSlice<T> {
    base: u32,
    len: u32,
    _phantom: PhantomData<T>,
}

#[derive(Default)]
pub struct BumpArena<T> {
    vec: Vec<MaybeUninit<T>>,
    freelist: Vec<Range<u32>>,
}

impl<T> BumpArena<T> {
    /// Create a new arena into which one can allocate `BumpVec`s.
    pub fn new() -> Self {
        Self {
            vec: vec![],
            freelist: vec![],
        }
    }

    /// Create a new arena, pre-allocating space for `cap` total `T`
    /// elements.
    pub fn arena_with_capacity(cap: usize) -> Self {
        Self {
            vec: Vec::with_capacity(cap),
            freelist: Vec::with_capacity(cap / 16),
        }
    }

    /// Create a new `BumpVec` with the given pre-allocated capacity
    /// and zero length.
    pub fn vec_with_capacity(&mut self, cap: usize) -> BumpVec<T> {
        let cap = u32::try_from(cap).unwrap();
        if let Some(range) = self.maybe_freelist_alloc(cap) {
            BumpVec {
                base: range.start,
                len: 0,
                cap,
                _phantom: PhantomData,
            }
        } else {
            let base = self.vec.len() as u32;
            for _ in 0..cap {
                self.vec.push(MaybeUninit::uninit());
            }
            BumpVec {
                base,
                len: 0,
                cap,
                _phantom: PhantomData,
            }
        }
    }

    /// Create a new `BumpVec` with a single element. The capacity is
    /// also only one element; growing the vector further will require
    /// a reallocation.
    pub fn single(&mut self, t: T) -> BumpVec<T> {
        let mut vec = self.vec_with_capacity(1);
        unsafe {
            self.write_into_index(vec.base, t);
        }
        vec.len = 1;
        vec
    }

    /// Create a new `BumpVec` with the sequence from an iterator.
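    ///
    /// An illustrative sketch (marked `ignore` so it is not run as a
    /// doctest):
    ///
    /// ```ignore
    /// let mut arena: BumpArena<u32> = BumpArena::new();
    /// let v = arena.from_iter([10, 20].into_iter());
    /// assert_eq!(v.as_slice(&arena), &[10, 20]);
    /// assert_eq!(v.cap(), 2); // capacity exactly equals length
    /// ```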
    pub fn from_iter<I: Iterator<Item = T>>(&mut self, i: I) -> BumpVec<T> {
        let base = self.vec.len() as u32;
        self.vec.extend(i.map(|item| MaybeUninit::new(item)));
        let len = self.vec.len() as u32 - base;
        BumpVec {
            base,
            len,
            cap: len,
            _phantom: PhantomData,
        }
    }

    /// Append two `BumpVec`s, returning a new one. Consumes both
    /// vectors. This will use the capacity at the end of `a` if
    /// possible to move `b`'s elements into place; otherwise it will
    /// need to allocate new space.
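    ///
    /// An illustrative sketch (marked `ignore` so it is not run as a
    /// doctest):
    ///
    /// ```ignore
    /// let mut arena: BumpArena<u32> = BumpArena::new();
    /// let a = arena.from_iter([1, 2].into_iter());
    /// let b = arena.single(3);
    /// // `a` has no spare capacity, so this allocates a new range.
    /// let ab = arena.append(a, b);
    /// assert_eq!(ab.as_slice(&arena), &[1, 2, 3]);
    /// ```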
    pub fn append(&mut self, a: BumpVec<T>, b: BumpVec<T>) -> BumpVec<T> {
        if (a.cap - a.len) >= b.len {
            self.append_into_cap(a, b)
        } else {
            self.append_into_new(a, b)
        }
    }

    /// Helper: read the `T` out of a given arena index. After
    /// reading, that index becomes uninitialized.
    unsafe fn read_out_of_index(&self, index: u32) -> T {
        // Note that we don't actually *track* uninitialized status
        // (and this is fine because we will never `Drop` and we never
        // allow a `BumpVec` to refer to an uninitialized index, so
        // the bits are effectively dead). We simply read the bits out
        // and return them.
        self.vec[index as usize].as_ptr().read()
    }

    /// Helper: write a `T` into the given arena index. Index must
    /// have been uninitialized previously.
    unsafe fn write_into_index(&mut self, index: u32, t: T) {
        self.vec[index as usize].as_mut_ptr().write(t);
    }

    /// Helper: move a `T` from one index to another. Old index
    /// becomes uninitialized and new index must have previously been
    /// uninitialized.
    unsafe fn move_item(&mut self, from: u32, to: u32) {
        let item = self.read_out_of_index(from);
        self.write_into_index(to, item);
    }

    /// Helper: push a `T` onto the end of the arena, growing its
    /// storage. The `T` to push is read out of another index, and
    /// that index subsequently becomes uninitialized.
    unsafe fn push_item(&mut self, from: u32) -> u32 {
        let index = self.vec.len() as u32;
        let item = self.read_out_of_index(from);
        self.vec.push(MaybeUninit::new(item));
        index
    }

    /// Helper: append `b` into the capacity at the end of `a`.
    fn append_into_cap(&mut self, mut a: BumpVec<T>, b: BumpVec<T>) -> BumpVec<T> {
        debug_assert!(a.cap - a.len >= b.len);
        for i in 0..b.len {
            // Safety: initially, the indices in `b` are initialized;
            // the indices in `a`'s cap, beyond its length, are
            // uninitialized. We move the initialized contents from
            // `b` to the tail beyond `a`, and we consume `b` (so it
            // no longer exists), and we update `a`'s length to cover
            // the initialized contents in their new location.
            unsafe {
                self.move_item(b.base + i, a.base + a.len + i);
            }
        }
        a.len += b.len;
        b.free(self);
        a
    }

    /// Helper: return a range of `len` indices that are available
    /// (uninitialized) according to the freelist, if possible.
    fn maybe_freelist_alloc(&mut self, len: u32) -> Option<Range<u32>> {
        if let Some(entry) = self.freelist.last_mut() {
            if entry.len() >= len as usize {
                let base = entry.start;
                entry.start += len;
                if entry.start == entry.end {
                    self.freelist.pop();
                }
                return Some(base..(base + len));
            }
        }
        None
    }

    /// Helper: append `a` and `b` into a completely new allocation.
    fn append_into_new(&mut self, a: BumpVec<T>, b: BumpVec<T>) -> BumpVec<T> {
        // New capacity: round up to a power of two.
        let len = a.len + b.len;
        let cap = round_up_power_of_two(len);

        if let Some(range) = self.maybe_freelist_alloc(cap) {
            for i in 0..a.len {
                // Safety: the indices in `a` must be initialized. We read
                // out the item and copy it to a new index; the old index
                // is no longer covered by a BumpVec, because we consume
                // `a`.
                unsafe {
                    self.move_item(a.base + i, range.start + i);
                }
            }
            for i in 0..b.len {
                // Safety: the indices in `b` must be initialized. We read
                // out the item and copy it to a new index; the old index
                // is no longer covered by a BumpVec, because we consume
                // `b`.
                unsafe {
                    self.move_item(b.base + i, range.start + a.len + i);
                }
            }

            a.free(self);
            b.free(self);

            BumpVec {
                base: range.start,
                len,
                cap,
                _phantom: PhantomData,
            }
        } else {
            self.vec.reserve(cap as usize);
            let base = self.vec.len() as u32;
            for i in 0..a.len {
                // Safety: the indices in `a` must be initialized. We read
                // out the item and copy it to a new index; the old index
                // is no longer covered by a BumpVec, because we consume
                // `a`.
                unsafe {
                    self.push_item(a.base + i);
                }
            }
            for i in 0..b.len {
                // Safety: the indices in `b` must be initialized. We read
                // out the item and copy it to a new index; the old index
                // is no longer covered by a BumpVec, because we consume
                // `b`.
                unsafe {
                    self.push_item(b.base + i);
                }
            }
            let len = self.vec.len() as u32 - base;

            for _ in len..cap {
                self.vec.push(MaybeUninit::uninit());
            }

            a.free(self);
            b.free(self);

            BumpVec {
                base,
                len,
                cap,
                _phantom: PhantomData,
            }
        }
    }

    /// Returns the size of the backing `Vec`.
    pub fn size(&self) -> usize {
        self.vec.len()
    }
}

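// Round `x` up to the next power of two, computed as
// `1 << ceil(log2(x))`. For x > 0, `ceil(log2(x))` is
// `32 - (x - 1).leading_zeros()`; e.g. for x = 24, x - 1 = 23 has 27
// leading zeros, so log2 = 5 and the result is 32.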
fn round_up_power_of_two(x: u32) -> u32 {
    debug_assert!(x > 0);
    debug_assert!(x < 0x8000_0000);
    let log2 = 32 - (x - 1).leading_zeros();
    1 << log2
}

impl<T> BumpVec<T> {
    /// Returns a slice view of this `BumpVec`, given a borrow of the
    /// arena.
    pub fn as_slice<'a>(&'a self, arena: &'a BumpArena<T>) -> &'a [T] {
        let maybe_uninit_slice =
            &arena.vec[(self.base as usize)..((self.base + self.len) as usize)];
        // Safety: the index range we represent must be initialized.
        unsafe { std::mem::transmute(maybe_uninit_slice) }
    }

    /// Returns a mutable slice view of this `BumpVec`, given a
    /// mutable borrow of the arena.
    pub fn as_mut_slice<'a>(&'a mut self, arena: &'a mut BumpArena<T>) -> &'a mut [T] {
        let maybe_uninit_slice =
            &mut arena.vec[(self.base as usize)..((self.base + self.len) as usize)];
        // Safety: the index range we represent must be initialized.
        unsafe { std::mem::transmute(maybe_uninit_slice) }
    }

    /// Returns the length of this vector. Does not require access to
    /// the arena.
    pub fn len(&self) -> usize {
        self.len as usize
    }

    /// Returns the capacity of this vector. Does not require access
    /// to the arena.
    pub fn cap(&self) -> usize {
        self.cap as usize
    }

    /// Reserve `extra_len` capacity at the end of the vector,
    /// reallocating if necessary.
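    ///
    /// An illustrative sketch (marked `ignore` so it is not run as a
    /// doctest):
    ///
    /// ```ignore
    /// let mut arena: BumpArena<u32> = BumpArena::new();
    /// let mut v = arena.single(1);
    /// // `v` is at the end of the arena, so this grows in place.
    /// v.reserve(3, &mut arena);
    /// assert_eq!(v.cap(), 4);
    /// v.push(2, &mut arena);
    /// assert_eq!(v.as_slice(&arena), &[1, 2]);
    /// ```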
    pub fn reserve(&mut self, extra_len: usize, arena: &mut BumpArena<T>) {
        let extra_len = u32::try_from(extra_len).unwrap();
        if self.cap - self.len < extra_len {
            if self.base + self.cap == arena.vec.len() as u32 {
                for _ in 0..extra_len {
                    arena.vec.push(MaybeUninit::uninit());
                }
                self.cap += extra_len;
            } else {
                let new_cap = self.cap + extra_len;
                let new = arena.vec_with_capacity(new_cap as usize);
                unsafe {
                    for i in 0..self.len {
                        arena.move_item(self.base + i, new.base + i);
                    }
                }
                self.base = new.base;
                self.cap = new.cap;
            }
        }
    }

    /// Push an item, growing the capacity if needed.
    pub fn push(&mut self, t: T, arena: &mut BumpArena<T>) {
        if self.cap > self.len {
            unsafe {
                arena.write_into_index(self.base + self.len, t);
            }
            self.len += 1;
        } else if (self.base + self.cap) as usize == arena.vec.len() {
            arena.vec.push(MaybeUninit::new(t));
            self.cap += 1;
            self.len += 1;
        } else {
            let new_cap = round_up_power_of_two(self.cap + 1);
            let extra = new_cap - self.cap;
            self.reserve(extra as usize, arena);
            unsafe {
                arena.write_into_index(self.base + self.len, t);
            }
            self.len += 1;
        }
    }

    /// Clone, if `T` is cloneable.
    pub fn clone(&self, arena: &mut BumpArena<T>) -> BumpVec<T>
    where
        T: Clone,
    {
        let mut new = arena.vec_with_capacity(self.len as usize);
        for i in 0..self.len {
            let item = self.as_slice(arena)[i as usize].clone();
            new.push(item, arena);
        }
        new
    }

    /// Truncate the length to a smaller-or-equal length.
    pub fn truncate(&mut self, len: usize) {
        let len = len as u32;
        assert!(len <= self.len);
        self.len = len;
    }

    /// Consume the BumpVec and return its indices to a free pool in
    /// the arena.
    pub fn free(self, arena: &mut BumpArena<T>) {
        arena.freelist.push(self.base..(self.base + self.cap));
    }

    /// Freeze the capacity of this BumpVec, turning it into a slice,
    /// for a smaller struct (8 bytes rather than 12). Once this
    /// exists, it is copyable, because the slice will never be freed.
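    ///
    /// An illustrative sketch (marked `ignore` so it is not run as a
    /// doctest):
    ///
    /// ```ignore
    /// let mut arena: BumpArena<u32> = BumpArena::new();
    /// let v = arena.from_iter([1, 2, 3].into_iter());
    /// let s: BumpSlice<u32> = v.freeze(&mut arena);
    /// assert_eq!(s.as_slice(&arena), &[1, 2, 3]);
    /// ```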
    pub fn freeze(self, arena: &mut BumpArena<T>) -> BumpSlice<T> {
        if self.cap > self.len {
            arena
                .freelist
                .push((self.base + self.len)..(self.base + self.cap));
        }
        BumpSlice {
            base: self.base,
            len: self.len,
            _phantom: PhantomData,
        }
    }
}

impl<T> BumpSlice<T> {
    /// Returns a slice view of the `BumpSlice`, given a borrow of the
    /// arena.
    pub fn as_slice<'a>(&'a self, arena: &'a BumpArena<T>) -> &'a [T] {
        let maybe_uninit_slice =
            &arena.vec[(self.base as usize)..((self.base + self.len) as usize)];
        // Safety: the index range we represent must be initialized.
        unsafe { std::mem::transmute(maybe_uninit_slice) }
    }

    /// Returns a mutable slice view of the `BumpSlice`, given a
    /// mutable borrow of the arena.
    pub fn as_mut_slice<'a>(&'a mut self, arena: &'a mut BumpArena<T>) -> &'a mut [T] {
        let maybe_uninit_slice =
            &mut arena.vec[(self.base as usize)..((self.base + self.len) as usize)];
        // Safety: the index range we represent must be initialized.
        unsafe { std::mem::transmute(maybe_uninit_slice) }
    }

    /// Returns the length of the `BumpSlice`.
    pub fn len(&self) -> usize {
        self.len as usize
    }
}

impl<T> std::default::Default for BumpVec<T> {
    fn default() -> Self {
        BumpVec {
            base: 0,
            len: 0,
            cap: 0,
            _phantom: PhantomData,
        }
    }
}

impl<T> std::default::Default for BumpSlice<T> {
    fn default() -> Self {
        BumpSlice {
            base: 0,
            len: 0,
            _phantom: PhantomData,
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_round_up() {
        assert_eq!(1, round_up_power_of_two(1));
        assert_eq!(2, round_up_power_of_two(2));
        assert_eq!(4, round_up_power_of_two(3));
        assert_eq!(4, round_up_power_of_two(4));
        assert_eq!(32, round_up_power_of_two(24));
        assert_eq!(0x8000_0000, round_up_power_of_two(0x7fff_ffff));
    }

    #[test]
    fn test_basic() {
        let mut arena: BumpArena<u32> = BumpArena::new();

        let a = arena.single(1);
        let b = arena.single(2);
        let c = arena.single(3);
        let ab = arena.append(a, b);
        assert_eq!(ab.as_slice(&arena), &[1, 2]);
        assert_eq!(ab.cap(), 2);
        let abc = arena.append(ab, c);
        assert_eq!(abc.len(), 3);
        assert_eq!(abc.cap(), 4);
        assert_eq!(abc.as_slice(&arena), &[1, 2, 3]);
        assert_eq!(arena.size(), 9);
        let mut d = arena.single(4);
        // Should have reused the freelist.
        assert_eq!(arena.size(), 9);
        assert_eq!(d.len(), 1);
        assert_eq!(d.cap(), 1);
        assert_eq!(d.as_slice(&arena), &[4]);
        d.as_mut_slice(&mut arena)[0] = 5;
        assert_eq!(d.as_slice(&arena), &[5]);
        abc.free(&mut arena);
        let d2 = d.clone(&mut arena);
        let dd = arena.append(d, d2);
        // Should have reused the freelist.
        assert_eq!(arena.size(), 9);
        assert_eq!(dd.as_slice(&arena), &[5, 5]);
        let mut e = arena.from_iter([10, 11, 12].into_iter());
        e.push(13, &mut arena);
        assert_eq!(arena.size(), 13);
        e.reserve(4, &mut arena);
        assert_eq!(arena.size(), 17);
        let _f = arena.from_iter([1, 2, 3, 4, 5, 6, 7, 8].into_iter());
        assert_eq!(arena.size(), 25);
        e.reserve(8, &mut arena);
        assert_eq!(e.cap(), 16);
        assert_eq!(e.as_slice(&arena), &[10, 11, 12, 13]);
        // `e` must have been copied now that `f` is at the end of the
        // arena.
        assert_eq!(arena.size(), 41);
    }
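
    // A small sketch of `freeze` behavior: spare capacity beyond the
    // length is returned to the freelist, and a later allocation can
    // reuse it without growing the backing `Vec`.
    #[test]
    fn test_freeze() {
        let mut arena: BumpArena<u32> = BumpArena::new();
        let mut v = arena.from_iter([1, 2, 3].into_iter());
        // Grow in place at the end of the arena: capacity becomes 8.
        v.reserve(5, &mut arena);
        assert_eq!(v.cap(), 8);
        assert_eq!(arena.size(), 8);
        let s = v.freeze(&mut arena);
        assert_eq!(s.len(), 3);
        assert_eq!(s.as_slice(&arena), &[1, 2, 3]);
        // The frozen vector's spare indices (3..8) should be reused.
        let d = arena.single(4);
        assert_eq!(d.as_slice(&arena), &[4]);
        assert_eq!(arena.size(), 8);
    }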
}