stable_arena/
lib.rs

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements two kinds of arena.
//!
//! # For types that need to be dropped: `TypedArena`
//!
//! `TypedArena` is used like this:
//!
//! ```rust
//! use stable_arena::TypedArena;
//!
//! let arena: TypedArena<Box<i32>> = TypedArena::default();
//! let x = arena.alloc(Box::new(42));
//! assert_eq!(**x, 42);
//! ```
//!
//! (Of course, storing a `Box` in an arena defeats the purpose of the arena,
//! but you get the idea.)
//!
//! A `TypedArena` can only hold objects of one type. It will call `drop` on
//! all objects when the arena itself is dropped.
//!
//! # For types that don't need to be dropped: `DroplessArena`
//!
//! The advantage of a `DroplessArena` is that it can hold objects of any type.
//! The disadvantage is that it will not call `drop` on any of them when it is
//! dropped.
//!
//! It can be used like this:
//!
//! ```rust
//! use stable_arena::DroplessArena;
//!
//! let arena = DroplessArena::default();
//! let x = arena.alloc(42);
//! assert_eq!(*x, 42);
//! let y = arena.alloc_str("hello");
//! assert_eq!(y, "hello");
//! ```
//!
//! You can also create reference cycles within a `DroplessArena` and it's
//! still perfectly safe; the memory will be freed when the arena is dropped.
//!
//! ```rust
//! use std::cell::Cell;
//! use stable_arena::DroplessArena;
//!
//! struct CycleParticipant<'arena> {
//!     other: Cell<Option<&'arena CycleParticipant<'arena>>>,
//! }
//!
//! let arena = DroplessArena::default();
//!
//! let a = arena.alloc(CycleParticipant {
//!     other: Cell::new(None),
//! });
//! let b = arena.alloc(CycleParticipant {
//!     other: Cell::new(None),
//! });
//!
//! a.other.set(Some(b));
//! b.other.set(Some(a));
//! ```
//!
//! # Features
//!
//! - The `from-iter` feature enables the `alloc_from_iter` method on both arenas, as shown
//!   below. This feature is enabled by default.
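//!
//! With the `from-iter` feature enabled, both arenas also provide an
//! `alloc_from_iter` method that copies the items of an iterator into a
//! contiguous slice in the arena:
//!
//! ```rust
//! use stable_arena::DroplessArena;
//!
//! let arena = DroplessArena::default();
//! let squares = arena.alloc_from_iter((1..=4).map(|i| i * i));
//! assert_eq!(squares, [1, 4, 9, 16]);
//! ```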

#![allow(clippy::mut_from_ref)] // Arena allocators are one place where this pattern is fine.

use std::alloc::Layout;
use std::cell::{Cell, RefCell};
use std::hint::assert_unchecked;
use std::marker::PhantomData;
use std::mem::{self, MaybeUninit};
use std::ptr::{self, NonNull};
use std::{cmp, slice};

#[cfg(feature = "from-iter")]
use smallvec::SmallVec;

#[cfg(feature = "from-iter")]
/// This calls the passed function while ensuring it won't be inlined into the caller.
#[inline(never)]
#[cold]
fn outline<F: FnOnce() -> R, R>(f: F) -> R {
    f()
}

struct ArenaChunk<T = u8> {
    /// The raw storage for the arena chunk.
    storage: NonNull<[MaybeUninit<T>]>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> Drop for ArenaChunk<T> {
    fn drop(&mut self) {
        unsafe { drop(Box::from_raw(self.storage.as_mut())) }
    }
}

impl<T> ArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> ArenaChunk<T> {
        ArenaChunk {
            storage: NonNull::from(Box::leak(Box::new_uninit_slice(capacity))),
            entries: 0,
        }
    }

    /// Destroys this arena chunk.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `len` elements of this chunk have been initialized.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<T> takes linear time.
        if mem::needs_drop::<T>() {
            // SAFETY: The caller must ensure that `len` elements of this chunk have
            // been initialized.
            unsafe {
                let slice = self.storage.as_mut();
                assume_init_drop(&mut slice[..len]);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&mut self) -> *mut T {
        self.storage.as_ptr() as *mut T
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&mut self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                ptr::without_provenance_mut(!0)
            } else {
                self.start().add(self.storage.len())
            }
        }
    }
}

/// Drops the contained values in place.
///
/// The definition corresponds to the unstable method in the standard library.
/// See <https://github.com/rust-lang/rust/issues/63569> for the tracking issue.
#[inline(always)]
unsafe fn assume_init_drop<T>(this: &mut [MaybeUninit<T>]) {
    if !this.is_empty() {
        // SAFETY: the caller must guarantee that every element of `this`
        // is initialized and satisfies all invariants of `T`.
        // Dropping the value in place is safe if that is the case.
        unsafe { ptr::drop_in_place(this as *mut [MaybeUninit<T>] as *mut [T]) }
    }
}

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux. For byte-sized elements the
// chunk capacities thus run 4 KiB, 8 KiB, 16 KiB, ..., 2 MiB, 2 MiB, ...
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<ArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to null so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
            _own: PhantomData,
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a mutable reference to it.
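    ///
    /// For example:
    ///
    /// ```rust
    /// use stable_arena::TypedArena;
    ///
    /// let arena: TypedArena<String> = TypedArena::default();
    /// let s = arena.alloc(String::from("hello"));
    /// s.push_str(", world");
    /// assert_eq!(s, "hello, world");
    /// ```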
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(self.ptr.get().wrapping_byte_add(1));
                let ptr = ptr::NonNull::<T>::dangling().as_ptr();
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().add(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[cfg(feature = "from-iter")]
    #[inline]
    fn can_allocate(&self, additional: usize) -> bool {
        // FIXME: this should *likely* use `offset_from`, but more
        // investigation is needed (including running tests in miri).
        let available_bytes = self.end.get().addr() - self.ptr.get().addr();
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }

    #[cfg(feature = "from-iter")]
    #[inline]
    fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        // Ensure the current chunk can fit `len` objects.
        if !self.can_allocate(len) {
            self.grow(len);
            debug_assert!(self.can_allocate(len));
        }

        let start_ptr = self.ptr.get();
        // SAFETY: `can_allocate`/`grow` ensures that there is enough space for
        // `len` elements.
        unsafe { self.ptr.set(start_ptr.add(len)) };
        start_ptr
    }

    #[cfg(feature = "from-iter")]
    /// Allocates the elements of this iterator into a contiguous slice in the `TypedArena`.
    ///
    /// Note: for reasons of reentrancy and panic safety we collect into a `SmallVec<[_; 8]>` before
    /// storing the elements in the arena.
    ///
    /// This function is only available if the `from-iter` feature is enabled.
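    ///
    /// For example:
    ///
    /// ```rust
    /// use stable_arena::TypedArena;
    ///
    /// let arena: TypedArena<String> = TypedArena::default();
    /// let strs = arena.alloc_from_iter((1..=3).map(|n| n.to_string()));
    /// assert_eq!(strs, ["1", "2", "3"]);
    /// ```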
    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        // Despite the similarity with `DroplessArena`, we cannot reuse its fast case. The reason
        // is subtle: these arenas are reentrant. In other words, `iter` may very well be holding a
        // reference to `self` and adding elements to the arena during iteration.
        //
        // For this reason, if we pre-allocated any space for the elements of this iterator, we'd
        // have to track that some uninitialized elements are followed by some initialized elements,
        // else we might accidentally drop uninitialized memory if something panics or if the
        // iterator doesn't fill all the length we expected.
        //
        // So we collect all the elements beforehand, which takes care of reentrancy and panic
        // safety. This function is much less hot than `DroplessArena::alloc_from_iter`, so it
        // doesn't need to be hyper-optimized.
        assert!(mem::size_of::<T>() != 0);

        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting it.
        let len = vec.len();
        let start_ptr = self.alloc_raw_slice(len);
        unsafe {
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // If a type is `!needs_drop`, we don't need to keep track of how many elements
                // the chunk stores - the field will be ignored anyway.
                if mem::needs_drop::<T>() {
                    // FIXME: this should *likely* use `offset_from`, but more
                    // investigation is needed (including running tests in miri).
                    let used_bytes = self.ptr.get().addr() - last_chunk.start().addr();
                    last_chunk.entries = used_bytes / mem::size_of::<T>();
                }

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = ArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut ArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start().addr();
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get().addr();
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `self.ptr` was incremented by one byte for each allocated value.
            end - start
        } else {
            // FIXME: this should *likely* use `offset_from`, but more
            // investigation is needed (including running tests in miri).
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // Box handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

#[inline(always)]
fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    // Clear the low bits, e.g. align_down(13, 8) == 8.
    val & !(align - 1)
}

#[inline(always)]
fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    // Round up to the next multiple, e.g. align_up(13, 8) == 16.
    (val + align - 1) & !(align - 1)
}

// Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
// to optimize away alignment code.
const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();

/// An arena that can hold objects of multiple different types that impl `Copy`
/// and/or satisfy `!mem::needs_drop`.
pub struct DroplessArena {
    /// A pointer to the start of the free space.
    start: Cell<*mut u8>,

    /// A pointer to the end of free space.
    ///
    /// The allocation proceeds downwards from the end of the chunk towards the
    /// start. (This is slightly simpler and faster than allocating upwards,
    /// see <https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html>.)
    /// When this pointer crosses the start pointer, a new chunk is allocated.
    ///
    /// This is kept aligned to DROPLESS_ALIGNMENT.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<ArenaChunk>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            // We set both `start` and `end` to null so that the first call to
            // alloc() will trigger a grow().
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline(never)]
    #[cold]
    fn grow(&self, layout: Layout) {
        // Add some padding so we can align `self.end` while
        // still fitting in a `layout` allocation.
        let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;

        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
            self.start.set(chunk.start());

            // Align the end to DROPLESS_ALIGNMENT.
            let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);

            // Make sure we don't go past `start`. This should not happen since `additional`
            // includes at least `DROPLESS_ALIGNMENT - 1` bytes of padding.
            debug_assert!(chunk.start().addr() <= end);

            self.end.set(chunk.end().with_addr(end));

            chunks.push(chunk);
        }
    }

    #[inline]
    fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);

        // This loop executes once or twice: if allocation fails the first
        // time, the `grow` ensures it will succeed the second time.
        loop {
            let start = self.start.get().addr();
            let old_end = self.end.get();
            let end = old_end.addr();

            // Align allocated bytes so that `self.end` stays aligned to
            // DROPLESS_ALIGNMENT.
            let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);

            // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT.
            unsafe { assert_unchecked(end == align_down(end, DROPLESS_ALIGNMENT)) };

            if let Some(sub) = end.checked_sub(bytes) {
                let new_end = align_down(sub, layout.align());
                if start <= new_end {
                    let new_end = old_end.with_addr(new_end);
                    // `new_end` is aligned to DROPLESS_ALIGNMENT: `align_down`
                    // preserves alignment, as both `end` and `bytes` are already
                    // aligned to DROPLESS_ALIGNMENT.
                    self.end.set(new_end);
                    return new_end;
                }
            }

            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(layout);
        }
    }

    /// Allocates an object in the `DroplessArena`, returning a mutable reference to it.
    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);

        let mem = self.alloc_raw(Layout::new::<T>()) as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
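    ///
    /// For example:
    ///
    /// ```rust
    /// use stable_arena::DroplessArena;
    ///
    /// let arena = DroplessArena::default();
    /// let digits = arena.alloc_slice(&[1u8, 2, 3]);
    /// digits[0] = 9;
    /// assert_eq!(digits, &[9, 2, 3]);
    /// ```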
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }

    /// Used by `Lift` to check whether this slice is allocated
    /// in this arena.
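    ///
    /// For example, a slice returned by `alloc_slice` is contained in the arena:
    ///
    /// ```rust
    /// use stable_arena::DroplessArena;
    ///
    /// let arena = DroplessArena::default();
    /// let inside = arena.alloc_slice(&[1, 2, 3]);
    /// assert!(arena.contains_slice(inside));
    /// ```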
    #[inline]
    pub fn contains_slice<T>(&self, slice: &[T]) -> bool {
        for chunk in self.chunks.borrow_mut().iter_mut() {
            let ptr = slice.as_ptr().cast::<u8>().cast_mut();
            if chunk.start() <= ptr && chunk.end() >= ptr {
                return true;
            }
        }
        false
    }

    /// Allocates a string slice that is copied into the `DroplessArena`, returning a
    /// reference to it.
    ///
    /// Panics:
    ///
    ///  - Zero-length string
    #[inline]
    pub fn alloc_str(&self, string: &str) -> &str {
        let slice = self.alloc_slice(string.as_bytes());

        // SAFETY: the result has a copy of the same valid UTF-8 bytes.
        unsafe { std::str::from_utf8_unchecked(slice) }
    }

    #[cfg(feature = "from-iter")]
    /// # Safety
    ///
    /// The caller must ensure that `mem` is valid for writes up to `size_of::<T>() * len` bytes,
    /// and that that memory stays allocated and unshared for the lifetime of `self`. This must
    /// hold even if `iter.next()` allocates onto `self`.
    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators.
        loop {
            // SAFETY: The caller must ensure that `mem` is valid for writes up to
            // `size_of::<T>() * len` bytes.
            unsafe {
                match iter.next() {
                    Some(value) if i < len => mem.add(i).write(value),
                    Some(_) | None => {
                        // We only return as many items as the iterator gave us, even
                        // though it was supposed to give us `len`.
                        return slice::from_raw_parts_mut(mem, i);
                    }
                }
            }
            i += 1;
        }
    }

    #[cfg(feature = "from-iter")]
    /// Allocates the elements of this iterator into a contiguous slice in the `DroplessArena`.
    ///
    /// Note: for reasons of reentrancy and panic safety, if the iterator does not report an
    /// exact length we collect its elements into a `SmallVec<[_; 8]>` before storing them in
    /// the arena.
    ///
    /// This function is only available if the `from-iter` feature is enabled.
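    ///
    /// For example, with an iterator whose exact length is not known up front:
    ///
    /// ```rust
    /// use stable_arena::DroplessArena;
    ///
    /// let arena = DroplessArena::default();
    /// let odds = arena.alloc_from_iter((0..10).filter(|n| n % 2 == 1));
    /// assert_eq!(odds, [1, 3, 5, 7, 9]);
    /// ```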
    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        // Warning: this function is reentrant: `iter` could hold a reference to `&self` and
        // allocate additional elements while we're iterating.
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator expects to produce here.
                let len = min;

                if len == 0 {
                    return &mut [];
                }

                let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
                // SAFETY: `write_from_iter` doesn't touch `self`. It only touches the slice we just
                // reserved. If the iterator panics or doesn't output `len` elements, this will
                // leave some uninitialized slots in the arena, which is fine because we do not call
                // `drop`.
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                outline(move || -> &mut [T] {
                    // Takes care of reentrancy.
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec.
                    unsafe {
                        let len = vec.len();
                        let start_ptr =
                            self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}

// Marker types that let us give different behaviour for arenas allocating
// `Copy` types vs `!Copy` types.
pub struct IsCopy;
pub struct IsNotCopy;

#[cfg(feature = "from-iter")]
/// Declare an `Arena` containing one dropless arena and many typed arenas (the
/// types of the typed arenas are specified by the arguments).
///
/// There are three cases of interest.
/// - Types that are `Copy`: these need not be specified in the arguments. They
///   will use the `DroplessArena` (because `Copy` types cannot be `Drop`).
/// - Types that are `!Copy` and `!Drop`: these must be specified in the
///   arguments. An empty `TypedArena` will be created for each one, but the
///   `DroplessArena` will always be used and the `TypedArena` will stay empty.
///   This is odd but harmless, because an empty arena allocates no memory.
/// - Types that are `!Copy` and `Drop`: these must be specified in the
///   arguments. The `TypedArena` will be used for them.
///
/// # Usage
///
/// ```rust
/// use std::cell::Cell;
/// use stable_arena::declare_arena;
///
/// // Creates a new type called `Arena`.
/// declare_arena!([
///     cells: Cell<i32>,  // `!Copy`, `!Drop`
///     boxes: Box<i32>,  // `!Copy`, `Drop`
/// ]);
///
/// let arena = Arena::default();
/// let c: &Cell<i32> = arena.alloc(Cell::new(1));
/// assert_eq!(c.get(), 1);
/// let b = arena.alloc(Box::new(2));
/// assert_eq!(**b, 2);
/// ```
///
/// This macro is only available if the `from-iter` feature is enabled.
#[macro_export]
macro_rules! declare_arena {
    ([$($name:ident: $ty:ty,)*]) => {
        #[derive(Default)]
        pub struct Arena {
            pub dropless: $crate::DroplessArena,
            $($name: $crate::TypedArena<$ty>,)*
        }

        pub trait ArenaAllocatable<C = $crate::IsNotCopy>: Sized {
            #[allow(clippy::mut_from_ref)]
            fn allocate_on(self, arena: &Arena) -> &mut Self;
            #[allow(clippy::mut_from_ref)]
            fn allocate_from_iter(
                arena: &Arena,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &mut [Self];
        }

        // Any type that impls `Copy` can be arena-allocated in the `DroplessArena`.
        impl<T: Copy> ArenaAllocatable<$crate::IsCopy> for T {
            #[inline]
            #[allow(clippy::mut_from_ref)]
            fn allocate_on(self, arena: &Arena) -> &mut Self {
                arena.dropless.alloc(self)
            }

            #[inline]
            #[allow(clippy::mut_from_ref)]
            fn allocate_from_iter(
                arena: &Arena,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &mut [Self] {
                arena.dropless.alloc_from_iter(iter)
            }
        }

        $(
            impl ArenaAllocatable<$crate::IsNotCopy> for $ty {
                #[inline]
                #[allow(clippy::mut_from_ref)]
                fn allocate_on(self, arena: &Arena) -> &mut Self {
                    if !::std::mem::needs_drop::<Self>() {
                        arena.dropless.alloc(self)
                    } else {
                        arena.$name.alloc(self)
                    }
                }

                #[inline]
                #[allow(clippy::mut_from_ref)]
                fn allocate_from_iter(
                    arena: &Arena,
                    iter: impl ::std::iter::IntoIterator<Item = Self>,
                ) -> &mut [Self] {
                    if !::std::mem::needs_drop::<Self>() {
                        arena.dropless.alloc_from_iter(iter)
                    } else {
                        arena.$name.alloc_from_iter(iter)
                    }
                }
            }
        )*

        impl Arena {
            #[inline]
            #[allow(clippy::mut_from_ref)]
            pub fn alloc<T: ArenaAllocatable<C>, C>(&self, value: T) -> &mut T {
                value.allocate_on(self)
            }

            // Any type that impls `Copy` can have slices be arena-allocated in the `DroplessArena`.
            #[inline]
            #[allow(clippy::mut_from_ref)]
            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            #[inline]
            pub fn alloc_str(&self, string: &str) -> &str {
                if string.is_empty() {
                    return "";
                }
                self.dropless.alloc_str(string)
            }

            #[allow(clippy::mut_from_ref)]
            pub fn alloc_from_iter<T: ArenaAllocatable<C>, C>(
                &self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &mut [T] {
                T::allocate_from_iter(self, iter)
            }
        }
    }
}

#[cfg(test)]
mod tests;