ferroc/
arena.rs

1//! The module of the arena collection.
2//!
3//! See [`Arenas`] for information.
4
5mod bitmap;
6
7use core::{
8    alloc::Layout,
9    mem::{self, MaybeUninit},
10    num::NonZeroUsize,
11    panic,
12    ptr::{self, NonNull},
13    sync::atomic::{AtomicPtr, AtomicUsize, Ordering::*},
14};
15
16use self::bitmap::Bitmap;
17use crate::{
18    base::{BaseAlloc, Chunk},
19    slab::{Slab, SlabRef},
20    track,
21};
22
/// The number of bits in a byte; used for bitmap size arithmetic below.
const BYTE_WIDTH: usize = u8::BITS as usize;
24
/// log2 of the size of a slab: 2 + 10 + 10 = 22, i.e. 4 MiB.
pub const SLAB_SHIFT: usize = 2 + 10 + 10;
/// The size of one slab in bytes; also the alignment of every slab.
pub const SLAB_SIZE: usize = 1 << SLAB_SHIFT;

/// Returns the layout of a contiguous run of `n` slabs: `n << SLAB_SHIFT`
/// bytes aligned to `SLAB_SIZE`.
///
/// # Panics
///
/// Panics if the resulting size/alignment pair is not a valid [`Layout`]
/// (e.g. the rounded-up size overflows `isize`).
pub const fn slab_layout(n: usize) -> Layout {
    // `Result::unwrap` is not callable in `const fn`, so branch manually.
    if let Ok(layout) = Layout::from_size_align(n << SLAB_SHIFT, SLAB_SIZE) {
        layout
    } else {
        panic!("invalid slab layout")
    }
}
34
/// log2 of the size of a shard: 6 + 10 = 16, i.e. 64 KiB.
pub(crate) const SHARD_SHIFT: usize = 6 + 10;
/// The size of one shard in bytes.
pub(crate) const SHARD_SIZE: usize = 1 << SHARD_SHIFT;

/// The number of shards in one slab (4 MiB / 64 KiB = 64).
pub(crate) const SHARD_COUNT: usize = SLAB_SIZE / SHARD_SIZE;
39
/// A region of memory managed as `slab_count` slab-sized blocks, with an
/// out-of-band header holding this metadata and (for shared arenas) an
/// allocation bitmap.
struct Arena<B: BaseAlloc> {
    // 1-based identifier, assigned by `Arenas::push_arena` (slot index + 1);
    // 0 until the arena is pushed.
    arena_id: usize,
    // The managed memory itself; holds at least `slab_count * SLAB_SIZE` bytes.
    chunk: Chunk<B>,
    // Backing storage of this struct and, for non-exclusive arenas, the bitmap.
    header: Chunk<B>,
    // Exclusive arenas back a single direct allocation and carry no bitmap.
    is_exclusive: bool,
    // The number of slab-sized blocks available in `chunk`.
    slab_count: usize,
    // Hint: bitmap index where the next free-slab search resumes.
    search_index: AtomicUsize,
}
48
impl<B: BaseAlloc> Arena<B> {
    /// The layout of the `Arena` header struct itself, not including the
    /// trailing bitmap.
    const LAYOUT: Layout = Layout::new::<Self>();

    /// Returns the layout of a header allocation: this struct followed by a
    /// bitmap of at least `slab_count` bits, rounded up to whole
    /// `AtomicUsize` words. Exclusive arenas carry no bitmap.
    fn header_layout(slab_count: usize, is_exclusive: bool) -> Layout {
        if !is_exclusive {
            // One bit per slab, rounded up to bytes and then to whole
            // `AtomicUsize` words.
            let bitmap_size = slab_count
                .div_ceil(BYTE_WIDTH)
                .next_multiple_of(mem::size_of::<AtomicUsize>());
            let n = bitmap_size / mem::size_of::<AtomicUsize>();

            let bitmap_layout = Layout::array::<AtomicUsize>(n).unwrap();
            let (header_layout, offset) = Self::LAYOUT.extend(bitmap_layout).unwrap();
            // `bitmap_ptr` assumes the bitmap starts exactly at
            // `LAYOUT.size()`; verify `extend` inserted no padding.
            assert_eq!(offset, Self::LAYOUT.size());
            header_layout
        } else {
            Self::LAYOUT
        }
    }

    /// Writes a fresh `Arena` into `header` and zero-initializes its bitmap.
    ///
    /// # Safety
    ///
    /// `header` must be a valid allocation laid out according to
    /// [`Self::header_layout`], i.e. large enough for this struct plus (for
    /// non-exclusive arenas) a bitmap covering all `slab_count` blocks in
    /// `chunk`.
    unsafe fn from_dynamic<'a>(
        header: Chunk<B>,
        chunk: Chunk<B>,
        is_exclusive: bool,
        slab_count: usize,
    ) -> &'a mut Arena<B> {
        // SAFETY: The pointer is properly aligned.
        let arena = unsafe {
            let pointer = header.pointer().cast::<Arena<B>>();
            pointer.as_uninit_mut().write(Arena {
                // Placeholder; the real 1-based id is assigned by
                // `Arenas::push_arena`.
                arena_id: 0,
                chunk,
                header,
                is_exclusive,
                slab_count,
                search_index: Default::default(),
            })
        };

        if !is_exclusive {
            // SAFETY: the bitmap pointer points to a valid & uninit memory block.
            unsafe {
                let maybe = arena.bitmap_ptr().as_uninit_slice_mut();
                maybe.fill(MaybeUninit::new(0));
            }
            // Mark the padding bits past `slab_count` as permanently
            // allocated so they are never handed out.
            let bitmap = arena.bitmap();
            bitmap.set::<true>(slab_count.try_into().unwrap()..bitmap.len());
        }

        arena
    }

    /// Allocates a brand-new arena of `slab_count` slabs from `base`, with a
    /// separate allocation for its header.
    ///
    /// # Errors
    ///
    /// Returns [`Error::Alloc`] if either base allocation fails.
    ///
    /// # Panics
    ///
    /// Panics if `align` is not a valid alignment for a `Layout`.
    fn new<'a>(
        base: &B,
        slab_count: usize,
        align: Option<usize>,
        is_exclusive: bool,
    ) -> Result<&'a mut Self, Error<B>> {
        let layout = match align {
            Some(align) => slab_layout(slab_count)
                .align_to(align)
                .expect("invalid align"),
            None => slab_layout(slab_count),
        };

        let header_layout = Self::header_layout(slab_count, is_exclusive);

        let chunk = base.allocate(layout, is_exclusive).map_err(Error::Alloc)?;
        let header = base.allocate(header_layout, true).map_err(Error::Alloc)?;

        // SAFETY: `header` is valid.
        Ok(unsafe { Self::from_dynamic(header, chunk, is_exclusive, slab_count) })
    }

    /// Wraps a caller-provided `chunk` in a new non-exclusive arena; only
    /// the header is allocated from `base`. Trailing bytes of `chunk` that
    /// don't fill a whole slab are left unused.
    ///
    /// # Panics
    ///
    /// Panics if `chunk` is not aligned to `SLAB_SIZE`.
    fn new_chunk<'a>(base: &B, chunk: Chunk<B>) -> Result<&'a mut Self, Error<B>> {
        let size = chunk.pointer().len();
        let slab_count = size / SLAB_SIZE;
        assert!(chunk.pointer().is_aligned_to(SLAB_SIZE));

        let header_layout = Self::header_layout(slab_count, false);
        let header = base.allocate(header_layout, true).map_err(Error::Alloc)?;

        // SAFETY: `header` is valid and laid out by `header_layout`.
        Ok(unsafe { Self::from_dynamic(header, chunk, false, slab_count) })
    }

    /// Drops the arena in place, releasing its `chunk` and `header`.
    ///
    /// # Safety
    ///
    /// `arena` must have no other references alive.
    unsafe fn drop(arena: NonNull<Self>) {
        // SAFETY: We read the data first so as to avoid dropping the dropped data.
        drop(unsafe { arena.read() });
    }

    /// Returns a pointer to the bitmap storage, which lives immediately
    /// after this struct inside the header allocation.
    fn bitmap_ptr(&self) -> NonNull<[u8]> {
        let (ptr, _) = self.header.pointer().to_raw_parts();
        NonNull::from_raw_parts(
            ptr.map_addr(|addr| addr.checked_add(Self::LAYOUT.size()).unwrap()),
            // Must mirror the size computation in `header_layout`.
            self.slab_count
                .div_ceil(BYTE_WIDTH)
                .next_multiple_of(mem::size_of::<AtomicUsize>()),
        )
    }

    /// Reinterprets the bitmap storage as a `Bitmap` over `AtomicUsize`
    /// words (`bitmap_ptr` yields a length in bytes).
    fn bitmap(&self) -> &Bitmap {
        let (ptr, len) = self.bitmap_ptr().to_raw_parts();
        let slice = NonNull::from_raw_parts(ptr, len / mem::size_of::<AtomicUsize>());
        // SAFETY: The bitmap pointer points to a valid `[AtomicUsize]`.
        Bitmap::new(unsafe { slice.as_ref() })
    }

    /// Reserves `count` contiguous free slabs in the bitmap and returns a
    /// pointer to their memory, or `None` if no such run exists.
    fn allocate_slices(&self, count: usize) -> Option<NonNull<[u8]>> {
        let start = self.search_index.load(Relaxed);
        let (idx, bit) = self.bitmap().allocate(start, count.try_into().ok()?)?;
        // Remember where space was found so the next search starts nearby.
        self.search_index.store(idx, Relaxed);

        let offset = (idx * BYTE_WIDTH + (bit as usize)) * SLAB_SIZE;
        // SAFETY: `idx` and `bit` is valid, and thus `offset` is within the chunk
        // memory range.
        let data = unsafe { self.chunk.pointer().byte_add(offset) };
        Some(NonNull::from_raw_parts(data.cast(), SLAB_SIZE * count))
    }

    /// Checks whether `ptr` lies within this arena's chunk (debug builds
    /// only).
    #[cfg(debug_assertions)]
    fn check_ptr(&self, ptr: NonNull<u8>) -> bool {
        let addr = ptr.as_ptr().addr();
        let (origin, size) = self.chunk.pointer().to_raw_parts();
        let origin = origin.as_ptr().addr();
        (origin..origin + size).contains(&addr)
    }

    /// Allocates and initializes a slab of `count * SLAB_SIZE` bytes owned
    /// by `thread_id`.
    ///
    /// Returns `None` when this arena has no free run of `count` slabs, and
    /// `Some(Err(Error::Commit(_)))` when committing the slab header fails.
    fn allocate(
        &self,
        thread_id: u64,
        count: usize,
        align: usize,
        is_large_or_huge: bool,
        base: &B,
    ) -> Option<Result<SlabRef, Error<B>>> {
        debug_assert!(align <= SLAB_SIZE);
        let ptr = self.allocate_slices(count)?;

        // Commit at least enough memory for the `Slab` header before
        // `Slab::init` writes it.
        let (addr, _) = ptr.to_raw_parts();
        let commit = unsafe { base.commit(NonNull::from_raw_parts(addr, mem::size_of::<Slab>())) };
        // SAFETY: The fresh allocation is aligned to `SLAB_SIZE`.
        let res = commit.map(|_| unsafe {
            Slab::init(
                ptr,
                thread_id,
                self.arena_id,
                is_large_or_huge,
                B::IS_ZEROED,
            )
        });
        Some(res.map_err(Error::Commit))
    }

    /// Decommits a slab's memory and returns its blocks to the bitmap,
    /// yielding the number of slab-sized blocks freed.
    ///
    /// # Safety
    ///
    /// - `slab` must be previously allocated from this arena;
    /// - No more references to the `slab` or its shards exist after calling
    ///   this function.
    unsafe fn deallocate(&self, slab: SlabRef, base: &B) -> usize {
        let raw = slab.into_raw();
        // SAFETY: No references into `raw` remain per this function's contract.
        unsafe { base.decommit(raw) };
        let (ptr, len) = raw.to_raw_parts();
        // SAFETY: `slab` was allocated from this arena, so it lies within `chunk`.
        let offset = unsafe { ptr.cast::<u8>().sub_ptr(self.chunk.pointer().cast()) };

        // `len` is a whole number of slabs (see `allocate_slices`), so the
        // exclusive end index is exact.
        let (start, end) = (offset / SLAB_SIZE, (offset + len) / SLAB_SIZE);
        self.bitmap().set::<false>((start as u32)..(end as u32));
        end - start
    }
}
224
/// The fixed number of arena slots; registration beyond this either panics
/// or errors (see `push_arena`).
const MAX_ARENAS: usize = 112;
/// A collection of arenas.
///
/// This structure manages all the memory within its lifetime. Multiple
/// [`Context`](crate::heap::Context)s and [`Heap`](crate::heap::Heap)s can have
/// reference to one instance of this type.
///
/// See [the crate-level documentation](crate) for its usage.
pub struct Arenas<B: BaseAlloc> {
    // The base allocator every arena's memory is obtained from.
    pub(crate) base: B,
    // Fixed-size table of registered arenas; a null slot is free.
    arenas: [AtomicPtr<Arena<B>>; MAX_ARENAS],
    // Number of slots claimed via the fast path; saturates at `MAX_ARENAS`.
    arena_count: AtomicUsize,
    // slab_count: AtomicUsize,
    // Head of the lock-free stack of abandoned slabs awaiting reclamation.
    abandoned: AtomicPtr<()>,
    // Head of the stack of abandoned slabs already examined and found
    // unsuitable for now (see `try_reclaim`).
    abandoned_visited: AtomicPtr<()>,
}
241
impl<B: BaseAlloc> Arenas<B> {
    // We're using this constant to initialize the array, so no real manipulation on
    // this constant is performed.
    #[allow(clippy::declare_interior_mutable_const)]
    const ARENA_INIT: AtomicPtr<Arena<B>> = AtomicPtr::new(ptr::null_mut());

    /// Creates a new collection of arenas.
    pub const fn new(base: B) -> Self {
        Arenas {
            base,
            arenas: [Self::ARENA_INIT; MAX_ARENAS],
            arena_count: AtomicUsize::new(0),
            // slab_count: AtomicUsize::new(0),
            abandoned: AtomicPtr::new(ptr::null_mut()),
            abandoned_visited: AtomicPtr::new(ptr::null_mut()),
        }
    }

    /// Publishes `arena` into a free slot and assigns its 1-based
    /// `arena_id` (slot index + 1).
    ///
    /// Fast path: claim slot `arena_count` by bumping the counter. Once the
    /// counter has saturated at `MAX_ARENAS`, fall back to scanning for a
    /// slot previously emptied by `deallocate_direct`.
    fn push_arena<'a>(&'a self, arena: &'a mut Arena<B>) -> Result<&'a Arena<B>, Error<B>> {
        let mut index = self.arena_count.load(Relaxed);
        loop {
            break if index < MAX_ARENAS {
                // Reserve the slot index first...
                if let Err(i) = self
                    .arena_count
                    .compare_exchange(index, index + 1, AcqRel, Acquire)
                {
                    index = i;
                    continue;
                }
                // ...then publish the arena into the reserved slot.
                if self.arenas[index]
                    .compare_exchange(ptr::null_mut(), arena, AcqRel, Acquire)
                    .is_err()
                {
                    index = self.arena_count.load(Relaxed);
                    continue;
                }
                arena.arena_id = index + 1;
                Ok(arena)
            } else if let Some(index) = self.arenas.iter().position(|slot| {
                slot.compare_exchange(ptr::null_mut(), arena, AcqRel, Acquire)
                    .is_ok()
            }) {
                arena.arena_id = index + 1;
                Ok(arena)
            } else {
                #[cfg(not(err_on_exhaustion))]
                panic!("ARENA EXHAUSTED");
                #[cfg(err_on_exhaustion)]
                {
                    // SAFETY: The arena is freshly allocated.
                    unsafe { Arena::drop(arena.into()) };
                    Err(Error::ArenaExhausted)
                }
            };
        }
    }

    /// Pushes an abandoned slab onto the lock-free `abandoned` stack so any
    /// thread may later reclaim it.
    fn push_abandoned(&self, slab: SlabRef) {
        debug_assert!(slab.is_abandoned());
        let mut next = self.abandoned.load(Relaxed);
        loop {
            // Link the new head to the current head, then try to swing the
            // head pointer; retry with the observed head on failure.
            slab.abandoned_next.store(next, Relaxed);
            match self.abandoned.compare_exchange_weak(
                next,
                slab.as_ptr().as_ptr(),
                AcqRel,
                Acquire,
            ) {
                Ok(_) => break,
                Err(e) => next = e,
            }
        }
    }

    /// Pushes a slab that was examined but not reclaimed onto the
    /// `abandoned_visited` stack, keeping it off the hot `abandoned` list.
    fn push_abandoned_visited(&self, slab: SlabRef) {
        debug_assert!(slab.is_abandoned());
        let mut next = self.abandoned_visited.load(Relaxed);
        loop {
            slab.abandoned_next.store(next, Relaxed);
            match self.abandoned_visited.compare_exchange_weak(
                next,
                slab.as_ptr().as_ptr(),
                AcqRel,
                Acquire,
            ) {
                Ok(_) => break,
                Err(e) => next = e,
            }
        }
    }

    /// Splices the entire `abandoned_visited` stack back onto `abandoned`.
    ///
    /// Returns `false` when there was nothing to move.
    fn reappend_abandoned_visited(&self) -> bool {
        // Racy pre-check to skip the swap when the list looks empty.
        if self.abandoned_visited.load(Relaxed).is_null() {
            return false;
        }
        // Detach the whole visited list in one shot.
        let first = self.abandoned_visited.swap(ptr::null_mut(), AcqRel);
        if first.is_null() {
            return false;
        };

        // Cheap path: if `abandoned` is empty, install the list wholesale.
        if self.abandoned.load(Relaxed).is_null()
            && self
                .abandoned
                .compare_exchange(ptr::null_mut(), first, AcqRel, Acquire)
                .is_ok()
        {
            return true;
        }

        // Otherwise walk to the detached list's tail...
        let mut last = first;
        let last = loop {
            // SAFETY: NOTE(review) — assumes nodes stay alive while linked on
            // the stack; confirm against the slab lifecycle.
            let next = unsafe { (*last.cast::<Slab>()).abandoned_next.load(Relaxed) };
            last = match next.is_null() {
                true => break last,
                false => next,
            };
        };

        // ...and prepend the whole detached list onto `abandoned`.
        let mut next = self.abandoned.load(Relaxed);
        loop {
            unsafe { (*last.cast::<Slab>()).abandoned_next.store(next, Relaxed) };
            match self
                .abandoned
                .compare_exchange_weak(next, first, AcqRel, Acquire)
            {
                Ok(_) => break,
                Err(e) => next = e,
            }
        }

        true
    }

    /// Pops one slab off the `abandoned` stack, refilling it from the
    /// visited list first when it is empty.
    fn pop_abandoned(&self) -> Option<SlabRef> {
        let mut next = self.abandoned.load(Relaxed);
        if next.is_null() && !self.reappend_abandoned_visited() {
            return None;
        }
        next = self.abandoned.load(Relaxed);
        loop {
            let ptr = NonNull::new(next)?;
            let new_next = unsafe { (*next.cast::<Slab>()).abandoned_next.load(Relaxed) };
            match self
                .abandoned
                .compare_exchange_weak(next, new_next, AcqRel, Acquire)
            {
                Ok(_) => break Some(unsafe { SlabRef::from_ptr(ptr) }),
                Err(e) => next = e,
            }
        }
    }

    /// Tries to satisfy an allocation request by adopting an abandoned slab.
    ///
    /// Examines at most `MAX_TRIAL` slabs; unsuitable ones are parked on the
    /// visited list so immediate retries don't rescan them.
    fn try_reclaim(
        &self,
        thread_id: u64,
        count: usize,
        align: usize,
        is_large_or_huge: bool,
    ) -> Option<SlabRef> {
        const MAX_TRIAL: usize = 8;
        let mut trial = MAX_TRIAL;
        while trial > 0
            && let Some(slab) = self.pop_abandoned()
        {
            // Adopt the slab only if it's collectible, big enough, and
            // sufficiently aligned.
            if slab.collect_abandoned()
                && slab.size >= count * SLAB_SIZE
                && slab.as_ptr().is_aligned_to(align)
            {
                let aid = slab.arena_id;
                let raw = slab.into_raw();
                // Re-initialize the slab under the adopting thread's id.
                let slab = unsafe { Slab::init(raw, thread_id, aid, is_large_or_huge, false) };
                return Some(slab);
            }
            self.push_abandoned_visited(slab);
            trial -= 1;
        }
        None
    }

    /// Iterates over all registered arenas together with their slot indices.
    fn all_arenas(&self) -> impl Iterator<Item = (usize, &Arena<B>)> {
        let iter = self.arenas[..self.arena_count.load(Acquire)].iter();
        // SAFETY: We check the nullity of the pointers.
        iter.enumerate()
            .filter_map(|(index, arena)| Some((index, unsafe { arena.load(Acquire).as_ref() }?)))
    }

    /// Iterates over registered arenas filtered by exclusivity.
    fn arenas(&self, is_exclusive: bool) -> impl Iterator<Item = (usize, &Arena<B>)> {
        self.all_arenas()
            .filter(move |(_, arena)| arena.is_exclusive == is_exclusive)
    }

    /// Retrieves the base allocator of this arena collection.
    pub fn base(&self) -> &B {
        &self.base
    }

    /// Manages another chunk previously allocated by an instance of its base
    /// allocator.
    ///
    /// This function creates a new arena from the chunk and pushes it to the
    /// collection for further allocation, extending the heap's overall
    /// capacity.
    ///
    /// # Errors
    ///
    /// Returns an error if the header allocation has failed, or the collection
    /// is full of arenas.
    pub fn manage(&self, chunk: Chunk<B>) -> Result<(), Error<B>> {
        let arena = Arena::new_chunk(&self.base, chunk)?;
        self.push_arena(arena)?;
        Ok(())
    }

    /// Checks whether `ptr` belongs to any registered arena (debug builds
    /// only).
    #[cfg(debug_assertions)]
    pub(crate) fn check_ptr(&self, ptr: NonNull<u8>) -> bool {
        self.all_arenas().any(|(_, arena)| arena.check_ptr(ptr))
    }

    /// Allocates a slab of `count` slab-sized blocks for `thread_id`.
    ///
    /// Attempts, in order: reclaiming an abandoned slab, every existing
    /// non-exclusive arena, and finally reserving a brand-new arena.
    pub(crate) fn allocate(
        &self,
        thread_id: u64,
        count: NonZeroUsize,
        align: usize,
        is_large_or_huge: bool,
    ) -> Result<SlabRef, Error<B>> {
        let count = count.get();

        let reclaimed = self.try_reclaim(thread_id, count, align, is_large_or_huge);
        let ret = match reclaimed.map(Ok).or_else(|| {
            self.arenas(false).find_map(|(_, arena)| {
                arena.allocate(thread_id, count, align, is_large_or_huge, &self.base)
            })
        }) {
            Some(slab) => slab?,
            None => {
                // Reserve slabs in bulk so subsequent allocations can be
                // served from the new arena.
                const MIN_RESERVE_COUNT: usize = 32;

                let reserve_count = count.max(MIN_RESERVE_COUNT)
                        /* .max(self.slab_count.load(Relaxed).isqrt()) */;
                let arena = Arena::new(&self.base, reserve_count, Some(align), false)?;
                let arena = self.push_arena(arena)?;
                // The freshly pushed arena is empty and at least `count`
                // slabs large, so finding space cannot fail; commit errors
                // still propagate via `?`.
                let res = arena.allocate(thread_id, count, align, is_large_or_huge, &self.base);
                res.unwrap()?
            }
        };
        // self.slab_count.fetch_add(count, Relaxed);
        Ok(ret)
    }

    /// Returns `slab` to its owning arena, or parks it on the abandoned
    /// list when it is flagged as abandoned.
    ///
    /// # Safety
    ///
    /// `slab` must be previously allocated from this structure;
    pub(crate) unsafe fn deallocate(&self, slab: SlabRef) {
        if !slab.is_abandoned() {
            // `arena_id` is 1-based (see `push_arena`).
            let arena = self.arenas[slab.arena_id - 1].load(Acquire);
            debug_assert!(!arena.is_null());
            // SAFETY: `arena` is obtained from the unique `arena_id`, and the arena won't
            // be dropped as long as any allocation from it is alive.
            let _slab_count = unsafe { (*arena).deallocate(slab, &self.base) };
            // self.slab_count.fetch_sub(slab_count, Relaxed);
        } else {
            self.push_abandoned(slab)
        }
    }

    /// Allocates a block of memory using `layout` from this structure directly,
    /// bypassing any instance of [`Heap`](crate::heap::Heap).
    ///
    /// Each direct allocation is backed by its own exclusive arena.
    ///
    /// The pointer must not be deallocated by any instance of `Heap` or other
    /// instances of this type.
    pub fn allocate_direct(&self, layout: Layout, zero: bool) -> Result<NonNull<[u8]>, Error<B>> {
        let arena = Arena::new(
            &self.base,
            layout.size().div_ceil(SLAB_SIZE),
            Some(layout.align()),
            true,
        )?;
        let arena = self.push_arena(arena)?;
        Ok(NonNull::from_raw_parts(
            arena.chunk.pointer().cast(),
            layout.size(),
        ))
        .inspect(|&ptr| crate::heap::post_alloc(ptr, B::IS_ZEROED, zero))
    }

    /// Retrieves the layout information of an allocation.
    ///
    /// If `ptr` was not previously allocated directly from this structure,
    /// `None` is returned.
    pub fn layout_of_direct(&self, ptr: NonNull<u8>) -> Option<Layout> {
        self.arenas(true)
            .find(|(_, arena)| arena.chunk.pointer().cast() == ptr)
            .map(|(_, arena)| arena.chunk.layout())
    }

    /// Deallocates an allocation previously from this structure.
    ///
    /// In release builds, a pointer not found in the collection is silently
    /// ignored.
    ///
    /// # Panics
    ///
    /// Panics if `ptr` is not allocated from this structure (debug builds
    /// only).
    ///
    /// # Safety
    ///
    /// No more aliases to the `ptr` should exist after calling this function.
    pub unsafe fn deallocate_direct(&self, ptr: NonNull<u8>) {
        if let Some((index, _)) = self
            .arenas(true)
            .find(|(_, arena)| arena.chunk.pointer().cast() == ptr)
        {
            track::deallocate(ptr, 0);
            // Vacate the slot so `push_arena`'s scan path can reuse it.
            let arena = self.arenas[index].swap(ptr::null_mut(), AcqRel);
            debug_assert!(!arena.is_null());
            // SAFETY: `arena` is exclusive.
            unsafe { Arena::drop(NonNull::new_unchecked(arena)) }
        } else {
            #[cfg(debug_assertions)]
            panic!("deallocating memory not from these arenas: {ptr:p}")
        }
    }
}
562
563impl<B: BaseAlloc> Drop for Arenas<B> {
564    fn drop(&mut self) {
565        let iter = self.arenas[..*self.arena_count.get_mut()].iter_mut();
566        iter.filter_map(|arena| NonNull::new(mem::replace(arena.get_mut(), ptr::null_mut())))
567            // SAFETY: All the arenas are unreferenced due to the lifetime model.
568            .for_each(|arena| unsafe { Arena::drop(arena) })
569    }
570}
571
/// The errors of all the functions of this crate.
#[derive(Debug)]
pub enum Error<B: BaseAlloc> {
    /// The base error returned when an allocation failed.
    Alloc(B::Error),
    /// The base error returned when a commission failed.
    Commit(B::Error),
    /// The arena collection is full of arenas.
    ArenaExhausted,
    /// The heap is not yet initialized.
    Uninit,
}
584
585impl<B: BaseAlloc> core::fmt::Display for Error<B>
586where
587    B::Error: core::fmt::Display,
588{
589    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
590        match self {
591            Error::Alloc(err) => write!(f, "base allocation failed: {err}"),
592            Error::Commit(err) => write!(f, "base commission failed: {err}"),
593            Error::ArenaExhausted => write!(f, "the arena collection is full of arenas"),
594            Error::Uninit => write!(f, "uninitialized heap"),
595        }
596    }
597}