typed_arena_nomut/
lib.rs

//! The arena, a fast but limited type of allocator.
//!
//! **A fast (but limited) allocation arena for values of a single type.**
//!
//! Allocated objects are destroyed all at once, when the arena itself is
//! destroyed. There is no deallocation of individual objects while the arena
//! itself is still alive. The flipside is that allocation is fast: typically
//! just a vector push.
//!
//! There is also a method `into_vec()` to recover ownership of allocated
//! objects when the arena is no longer required, instead of destroying
//! everything.
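//!
//! A minimal sketch of recovering the values (allocation order is preserved):
//!
//! ```
//! use typed_arena_nomut::Arena;
//!
//! let arena = Arena::new();
//! arena.alloc(1);
//! arena.alloc(2);
//!
//! assert_eq!(arena.into_vec(), vec![1, 2]);
//! ```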
//!
//! ## Example
//!
//! ```
//! use typed_arena_nomut::Arena;
//!
//! struct Monster {
//!     level: u32,
//! }
//!
//! let monsters = Arena::new();
//!
//! let goku = monsters.alloc(Monster { level: 9001 });
//! assert!(goku.level > 9000);
//! ```
//!
//! ## Safe Cycles
//!
//! All allocated objects get the same lifetime, so you can safely create cycles
//! between them. This can be useful for certain data structures, such as graphs
//! and trees with parent pointers.
//!
//! ```
//! use std::cell::Cell;
//! use typed_arena_nomut::Arena;
//!
//! struct CycleParticipant<'a> {
//!     other: Cell<Option<&'a CycleParticipant<'a>>>,
//! }
//!
//! let arena = Arena::new();
//!
//! let a = arena.alloc(CycleParticipant { other: Cell::new(None) });
//! let b = arena.alloc(CycleParticipant { other: Cell::new(None) });
//!
//! a.other.set(Some(b));
//! b.other.set(Some(a));
//! ```
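//!
//! ## Iteration
//!
//! The arena can also be iterated, yielding shared references in allocation
//! order (a minimal sketch):
//!
//! ```
//! use typed_arena_nomut::Arena;
//!
//! let arena = Arena::new();
//! arena.alloc("a");
//! arena.alloc("b");
//!
//! let collected: Vec<&str> = arena.iter().copied().collect();
//! assert_eq!(collected, ["a", "b"]);
//! ```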

// Potential optimizations:
// 1) add and stabilize a method for in-place reallocation of vecs.
// 2) add and stabilize placement new.
// 3) use an iterator. This may add far too much unsafe code.

#![deny(missing_docs)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

#[cfg(any(feature = "std", test))]
extern crate core;

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

// `Ref` comes from `core::cell` (not `std`) so that `no_std` builds work.
use core::cell::{Ref, RefCell};
use core::cmp;
use core::iter;
use core::mem;
use core::slice;
use core::str;

use mem::MaybeUninit;

#[cfg(test)]
mod test;

// Initial size in bytes.
const INITIAL_SIZE: usize = 1024;
// Minimum capacity. Must be larger than 0.
const MIN_CAPACITY: usize = 1;

/// An arena of objects of type `T`.
///
/// ## Example
///
/// ```
/// use typed_arena_nomut::Arena;
///
/// struct Monster {
///     level: u32,
/// }
///
/// let monsters = Arena::new();
///
/// let vegeta = monsters.alloc(Monster { level: 9001 });
/// assert!(vegeta.level > 9000);
/// ```
pub struct Arena<T> {
    chunks: RefCell<ChunkList<T>>,
}

struct ChunkList<T> {
    // The chunk currently being filled. Once full, it is retired to `rest`
    // and replaced by a larger chunk. Elements already handed out are never
    // moved, so references into the arena stay valid.
    current: Vec<T>,
    rest: Vec<Vec<T>>,
}

impl<T> Arena<T> {
    /// Construct a new arena.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena = Arena::new();
    /// # arena.alloc(1);
    /// ```
    pub fn new() -> Arena<T> {
        // Size the first chunk to span roughly INITIAL_SIZE bytes
        // (at least one element, to handle zero-sized types).
        let size = cmp::max(1, mem::size_of::<T>());
        Arena::with_capacity(INITIAL_SIZE / size)
    }

    /// Construct a new arena with capacity for `n` values pre-allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena = Arena::with_capacity(1337);
    /// # arena.alloc(1);
    /// ```
    pub fn with_capacity(n: usize) -> Arena<T> {
        let n = cmp::max(MIN_CAPACITY, n);
        Arena {
            chunks: RefCell::new(ChunkList {
                current: Vec::with_capacity(n),
                rest: Vec::new(),
            }),
        }
    }

    /// Returns the number of values allocated in the arena.
    ///
    /// This is useful for sizing a new arena based on how large a previous
    /// one grew.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena = Arena::with_capacity(0);
    /// arena.alloc(1);
    /// arena.alloc(2);
    ///
    /// assert_eq!(arena.len(), 2);
    /// ```
    pub fn len(&self) -> usize {
        let chunks = self.chunks.borrow();

        let mut res = 0;
        for vec in chunks.rest.iter() {
            res += vec.len()
        }

        res + chunks.current.len()
    }

    /// Allocates a value in the arena, and returns a shared reference
    /// to that value.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena = Arena::new();
    /// let x = arena.alloc(42);
    /// assert_eq!(*x, 42);
    /// ```
    #[inline]
    pub fn alloc(&self, value: T) -> &T {
        self.alloc_fast_path(value)
            .unwrap_or_else(|value| self.alloc_slow_path(value))
    }

    #[inline]
    fn alloc_fast_path(&self, value: T) -> Result<&T, T> {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.len();
        if len < chunks.current.capacity() {
            chunks.current.push(value);
            // Avoid going through `Vec::deref_mut`, which overlaps
            // other references we have already handed out!
            debug_assert!(len < chunks.current.len()); // bounds check
            Ok(unsafe { &mut *chunks.current.as_mut_ptr().add(len) })
        } else {
            Err(value)
        }
    }

    fn alloc_slow_path(&self, value: T) -> &T {
        &self.alloc_extend(iter::once(value))[0]
    }

    /// Uses the contents of an iterator to allocate values in the arena.
    /// Returns a shared slice that contains these values.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena = Arena::new();
    /// let abc = arena.alloc_extend("abcdefg".chars().take(3));
    /// assert_eq!(abc, ['a', 'b', 'c']);
    /// ```
    pub fn alloc_extend<I>(&self, iterable: I) -> &[T]
    where
        I: IntoIterator<Item = T>,
    {
        let mut iter = iterable.into_iter();

        let mut chunks = self.chunks.borrow_mut();

        let iter_min_len = iter.size_hint().0;
        let mut next_item_index;
        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if iter_min_len > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(iter_min_len);
            chunks.current.extend(iter);
            next_item_index = 0;
        } else {
            next_item_index = chunks.current.len();
            let mut i = 0;
            while let Some(elem) = iter.next() {
                if chunks.current.len() == chunks.current.capacity() {
                    // The iterator was larger than we could fit into the current chunk.
                    let chunks = &mut *chunks;
                    // Create a new chunk into which we can freely push the entire iterator
                    chunks.reserve(i + 1);
                    let previous_chunk = chunks.rest.last_mut().unwrap();
                    let previous_chunk_len = previous_chunk.len();
                    // Move any elements we put into the previous chunk into this new chunk
                    chunks
                        .current
                        .extend(previous_chunk.drain(previous_chunk_len - i..));
                    chunks.current.push(elem);
                    // And the remaining elements in the iterator
                    chunks.current.extend(iter);
                    next_item_index = 0;
                    break;
                } else {
                    chunks.current.push(elem);
                }
                i += 1;
            }
        }
        let new_slice_ref = &mut chunks.current[next_item_index..];

        // Extend the lifetime from that of `chunks` to that of `self`.
        // This is OK because items are never moved once they have been
        // handed out: a chunk that holds borrowed items is never pushed
        // to beyond its capacity. The slice is returned as a shared
        // reference, like every reference this arena hands out.
        unsafe { mem::transmute::<&mut [T], &[T]>(new_slice_ref) }
    }

    /// Allocates space for a given number of values, but doesn't initialize it.
    ///
    /// ## Safety
    ///
    /// After calling this method, the arena considers the elements initialized. If you fail to
    /// initialize them (including by panicking during initialization), the arena
    /// will run destructors on the uninitialized memory. Therefore, you must initialize them.
    ///
    /// Considering how easy it is to cause undefined behaviour using this, you're advised to
    /// prefer the other (safe) methods, like [`alloc_extend`][Arena::alloc_extend].
    ///
    /// ## Example
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena_nomut::Arena;
    ///
    /// // Transmute from MaybeUninit slice to slice of initialized T.
    /// // It is a separate function to preserve the lifetime of the reference.
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// let arena: Arena<bool> = Arena::new();
    /// let slice: &mut [bool];
    /// unsafe {
    ///     let uninitialized = arena.alloc_uninitialized(10);
    ///     for elem in uninitialized.iter_mut() {
    ///         ptr::write(elem.as_mut_ptr(), true);
    ///     }
    ///     slice = transmute_uninit(uninitialized);
    /// }
    /// ```
    ///
    /// ## Alternative allocation pattern
    ///
    /// To avoid dropping elements that are only assumed to be initialized when a panic occurs,
    /// you can instead combine [`reserve_extend`][Arena::reserve_extend] with
    /// [`uninitialized_array`][Arena::uninitialized_array], initialize the elements, and confirm
    /// them with this method. With that pattern, a panic during initialization leaks the
    /// already-initialized elements, but it cannot cause UB.
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena_nomut::Arena;
    ///
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// const COUNT: usize = 2;
    ///
    /// let arena: Arena<String> = Arena::new();
    ///
    /// arena.reserve_extend(COUNT);
    /// let slice: &mut [String];
    /// unsafe {
    ///     // Perform initialization before we claim the memory.
    ///     let uninitialized = arena.uninitialized_array();
    ///     assert!((*uninitialized).len() >= COUNT); // Ensured by the reserve_extend
    ///     for elem in &mut (*uninitialized)[..COUNT] {
    ///         ptr::write(elem.as_mut_ptr(), "Hello".to_owned());
    ///     }
    ///     let addr = (*uninitialized).as_ptr() as usize;
    ///
    ///     // The alloc_uninitialized returns the same memory, but "confirms" its allocation.
    ///     slice = transmute_uninit(arena.alloc_uninitialized(COUNT));
    ///     assert_eq!(addr, slice.as_ptr() as usize);
    ///     assert_eq!(slice, &["Hello".to_owned(), "Hello".to_owned()]);
    /// }
    /// ```
    pub unsafe fn alloc_uninitialized(&self, num: usize) -> &mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }

        // At this point, the current chunk must have free capacity.
        let next_item_index = chunks.current.len();
        chunks.current.set_len(next_item_index + num);

        // Go through pointers, to make sure we never create a reference to uninitialized T.
        let start = chunks.current.as_mut_ptr().add(next_item_index);
        let start_uninit = start as *mut MaybeUninit<T>;
        slice::from_raw_parts_mut(start_uninit, num)
    }

    /// Makes sure there's enough contiguous space for at least `num` elements.
    ///
    /// This may save some work if called before [`alloc_extend`][Arena::alloc_extend]. It also
    /// allows a somewhat safer use pattern of [`alloc_uninitialized`][Arena::alloc_uninitialized].
    /// On the other hand, this might waste up to `num - 1` elements of space: if a new chunk has
    /// to be allocated, the unused capacity of the current chunk is never used.
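    ///
    /// ## Example
    ///
    /// A minimal sketch of pre-reserving before a bulk insertion:
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena: Arena<u32> = Arena::with_capacity(1);
    /// arena.reserve_extend(100);
    /// // The following `alloc_extend` fits in the reserved space, so the
    /// // arena does not have to grow mid-iteration.
    /// arena.alloc_extend(0..100);
    /// assert_eq!(arena.len(), 100);
    /// ```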
    pub fn reserve_extend(&self, num: usize) {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }
    }

    /// Returns unused space.
    ///
    /// *This unused space is still not considered "allocated".* Therefore, it
    /// won't be dropped unless there are further calls to `alloc`,
    /// [`alloc_uninitialized`][Arena::alloc_uninitialized], or
    /// [`alloc_extend`][Arena::alloc_extend], which is why the method is safe.
    ///
    /// It returns a raw pointer to avoid creating multiple mutable references to the same place.
    /// It is up to the caller not to dereference it after any of the `alloc_` methods are called.
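    ///
    /// ## Example
    ///
    /// A minimal sketch that only inspects the spare capacity (nothing is
    /// written or allocated here):
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena: Arena<u8> = Arena::with_capacity(16);
    /// let spare = arena.uninitialized_array();
    /// // Reading the length is fine; the memory itself stays uninitialized.
    /// assert!(unsafe { (*spare).len() } >= 16);
    /// ```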
    pub fn uninitialized_array(&self) -> *mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.capacity() - chunks.current.len();
        let next_item_index = chunks.current.len();

        unsafe {
            // Go through pointers, to make sure we never create a reference to uninitialized T.
            let start = chunks.current.as_mut_ptr().add(next_item_index);
            let start_uninit = start as *mut MaybeUninit<T>;
            slice::from_raw_parts_mut(start_uninit, len) as *mut _
        }
    }

    /// Convert this `Arena` into a `Vec<T>`.
    ///
    /// Items in the resulting `Vec<T>` appear in the order that they were
    /// allocated in.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena = Arena::new();
    ///
    /// arena.alloc("a");
    /// arena.alloc("b");
    /// arena.alloc("c");
    ///
    /// let easy_as_123 = arena.into_vec();
    ///
    /// assert_eq!(easy_as_123, vec!["a", "b", "c"]);
    /// ```
    pub fn into_vec(self) -> Vec<T> {
        let mut chunks = self.chunks.into_inner();
        // keep order of allocation in the resulting Vec
        let n = chunks
            .rest
            .iter()
            .fold(chunks.current.len(), |a, v| a + v.len());
        let mut result = Vec::with_capacity(n);
        for mut vec in chunks.rest {
            result.append(&mut vec);
        }
        result.append(&mut chunks.current);
        result
    }

    /// Returns an iterator over the values in the arena.
    ///
    /// Items are yielded in the order that they were allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    /// use std::cell::Cell;
    ///
    /// #[derive(Debug, PartialEq, Eq)]
    /// struct Point { x: Cell<i32>, y: i32 }
    ///
    /// let arena = Arena::new();
    ///
    /// arena.alloc(Point { x: Cell::new(0), y: 0 });
    /// arena.alloc(Point { x: Cell::new(1), y: 1 });
    ///
    /// for point in arena.iter() {
    ///     point.x.set(point.x.get() + 10);
    /// }
    ///
    /// let points = arena.into_vec();
    ///
    /// assert_eq!(points, vec![Point { x: Cell::new(10), y: 0 }, Point { x: Cell::new(11), y: 1 }]);
    /// ```
    ///
    /// ## Immutable Iteration
    ///
    /// The iterator yields shared references, so the arena's contents cannot
    /// be modified through it directly; use interior mutability (such as the
    /// `Cell`s above and below) where mutation is needed.
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    /// use std::cell::Cell;
    ///
    /// let arena = Arena::new();
    /// let x = arena.alloc(Cell::new(1));
    ///
    /// for i in arena.iter() {
    ///     println!("i: {}", i.get());
    /// }
    ///
    /// x.set(x.get() * 2);
    /// ```
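    ///
    /// Allocating from the arena while an iterator is alive panics, because
    /// the iterator keeps the arena's internals borrowed (a minimal sketch of
    /// the failure mode):
    ///
    /// ```should_panic
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena = Arena::new();
    /// arena.alloc(1);
    ///
    /// for i in arena.iter() {
    ///     // Panics: `alloc` needs a mutable borrow of the arena's internal
    ///     // `RefCell`, which the iterator still holds.
    ///     arena.alloc(*i + 1);
    /// }
    /// ```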
    #[inline]
    pub fn iter(&self) -> Iter<T> {
        let chunks = self.chunks.borrow();
        let position = if !chunks.rest.is_empty() {
            let index = 0;
            let inner_iter = chunks.rest[index].iter();
            // Extend the lifetime of the individual elements to that of the arena.
            // This is OK because the iterator holds a `Ref` borrow of the arena's
            // `RefCell`, so allocation (which needs a mutable borrow) panics while
            // the iterator is alive, and items are never moved inside the arena.
            let inner_iter = unsafe { mem::transmute(inner_iter) };
            IterState::ChunkListRest { index, inner_iter }
        } else {
            // Extend the lifetime of the individual elements to that of the arena.
            let iter = unsafe { mem::transmute(chunks.current.iter()) };
            IterState::ChunkListCurrent { iter }
        };
        Iter {
            chunks,
            state: position,
        }
    }
}

impl Arena<u8> {
    /// Allocates a string slice and returns a shared reference to it.
    ///
    /// This is on `Arena<u8>`, because string slices use byte slices (`[u8]`) as their backing
    /// storage.
    ///
    /// # Example
    ///
    /// ```
    /// use typed_arena_nomut::Arena;
    ///
    /// let arena: Arena<u8> = Arena::new();
    /// let hello = arena.alloc_str("Hello world");
    /// assert_eq!("Hello world", hello);
    /// ```
    #[inline]
    pub fn alloc_str(&self, s: &str) -> &str {
        let buffer = self.alloc_extend(s.bytes());
        // Can't fail the utf8 validation, it already came in as utf8
        unsafe { str::from_utf8_unchecked(buffer) }
    }
}

impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T> ChunkList<T> {
    #[inline(never)]
    #[cold]
    fn reserve(&mut self, additional: usize) {
        // Grow geometrically: the new chunk is at least double the current
        // one and large enough for `additional` more elements. The old chunk
        // is retired to `rest`; its elements never move, so references into
        // it stay valid.
        let double_cap = self
            .current
            .capacity()
            .checked_mul(2)
            .expect("capacity overflow");
        let required_cap = additional
            .checked_next_power_of_two()
            .expect("capacity overflow");
        let new_capacity = cmp::max(double_cap, required_cap);
        let chunk = mem::replace(&mut self.current, Vec::with_capacity(new_capacity));
        self.rest.push(chunk);
    }
}

enum IterState<'a, T> {
    ChunkListRest {
        index: usize,
        inner_iter: slice::Iter<'a, T>,
    },
    ChunkListCurrent {
        iter: slice::Iter<'a, T>,
    },
}

/// Shared arena iterator.
///
/// This struct is created by the [`iter`](struct.Arena.html#method.iter) method on [Arenas](struct.Arena.html).
pub struct Iter<'a, T: 'a> {
    chunks: Ref<'a, ChunkList<T>>,
    state: IterState<'a, T>,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<&'a T> {
        loop {
            self.state = match self.state {
                IterState::ChunkListRest {
                    mut index,
                    ref mut inner_iter,
                } => {
                    match inner_iter.next() {
                        Some(item) => return Some(item),
                        None => {
                            index += 1;
                            if index < self.chunks.rest.len() {
                                let inner_iter = self.chunks.rest[index].iter();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let inner_iter = unsafe { mem::transmute(inner_iter) };
                                IterState::ChunkListRest { index, inner_iter }
                            } else {
                                let iter = self.chunks.current.iter();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let iter = unsafe { mem::transmute(iter) };
                                IterState::ChunkListCurrent { iter }
                            }
                        }
                    }
                }
                IterState::ChunkListCurrent { ref mut iter } => return iter.next(),
            };
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Count exactly how many items remain: the rest of the chunk being
        // iterated, every later chunk in `rest`, and the current chunk.
        // `slice::Iter` is an `ExactSizeIterator`, so this stays correct
        // even after the iterator has been partially consumed.
        let remaining = match self.state {
            IterState::ChunkListRest {
                index,
                ref inner_iter,
            } => {
                let later_chunks: usize = self.chunks.rest[index + 1..]
                    .iter()
                    .map(|chunk| chunk.len())
                    .sum();
                inner_iter.len() + later_chunks + self.chunks.current.len()
            }
            IterState::ChunkListCurrent { ref iter } => iter.len(),
        };
        (remaining, Some(remaining))
    }
}