// vmem/lib.rs

1#![doc = include_str!("../README.md")]
2#![cfg_attr(not(any(test, doc, feature = "std")), no_std)]
3#![cfg_attr(feature = "nightly", feature(allocator_api))]
4#![deny(missing_docs)]
5
6use core::{cell::UnsafeCell, fmt::Debug, num::NonZeroUsize, ptr::NonNull};
7
8use allocation_table::AllocationTable;
9use freelists::Freelists;
10use list::{SegmentList, SpecialList};
11
12pub use crate::bt::Bt;
13use crate::bt::{Link, Type};
14
15#[doc(inline)]
16pub use alloc::*;
17#[doc(inline)]
18pub use error::*;
19#[doc(inline)]
20pub use layout::*;
21#[doc(inline)]
22pub use lock::*;
23
24pub mod alloc;
25pub mod error;
26pub mod layout;
27pub mod lock;
28
29mod allocation_table;
30mod bt;
31mod freelists;
32mod list;
33
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
/// An allocation policy.
pub enum AllocPolicy {
    /// Allocate from the first block found that has a size greater than or
    /// equal to the requested size. This can cause fragmentation, but it is
    /// still generally the recommended policy, as it is O(1).
    InstantFit,
    /// Allocate the smallest block that can fit the requested size. This
    /// fragments less than [`InstantFit`](AllocPolicy::InstantFit), but is O(n)
    /// in the worst case.
    BestFit,
    /// Allocate from a block after the last-allocated block. This guarantees
    /// that allocations will not be reused, even after they are freed, until
    /// the arena wraps around again. This is beneficial for process IDs, for
    /// example, where re-allocating the same ID instantly could cause a
    /// spurious kill left over from the first process. Like
    /// [`InstantFit`](AllocPolicy::InstantFit), this policy does not take
    /// wasted size into account. However, it is not quite O(1), because it
    /// needs to traverse list to find the next block. It will likely be close
    /// to O(1), but could be O(n) in the worst case.
    NextFit,
}
56
/// Something that can be used as a source for an arena. See
/// [`Arena`](Arena#importing-from-a-source) for more details.
///
/// [`Arena`] itself implements this trait, so arenas can be stacked: one
/// arena may borrow its spans from another.
pub trait Source: Sync {
    /// Import a block that satisfies `Layout` from this source, returning its
    /// base.
    ///
    /// # Panics
    /// This function must not panic. If it fails, it should return an error.
    fn import(&self, layout: Layout) -> error::Result<usize>;
    /// Release a block of memory that was previously imported from this source.
    ///
    /// # Panics
    /// This function must not panic. If it fails, it should return an error.
    ///
    /// # Safety
    /// The base and size must point to a valid block of memory that was
    /// allocated through [`import`](Source::import) from this source, and was
    /// not [`release`](Source::release)d yet.
    unsafe fn release(&self, base: usize, size: usize) -> error::Result<()>;
}
77impl<'label, 'src, A: alloc::Allocator + Sync, L: lock::Lock + Sync> Source
78    for Arena<'label, 'src, A, L>
79{
80    fn import(&self, layout: Layout) -> error::Result<usize> {
81        match (layout.min(), layout.max(), layout.phase(), layout.nocross()) {
82            (None, None, None, None) if layout.align() <= self.quantum => {
83                self.alloc(layout.size(), AllocPolicy::InstantFit)
84            }
85            _ => self.xalloc(layout, AllocPolicy::InstantFit),
86        }
87    }
88
89    unsafe fn release(&self, base: usize, _size: usize) -> error::Result<()> {
90        self.free(base)
91    }
92}
93
/// A vmem arena.
///
/// This arena can be used for allocation of any resource that can be
/// represented by a base and a length. However, it is quite inefficient in its
/// memory usage for small allocations, so it is recommended to allocate larger
/// blocks and then suballocate from those, unless memory usage is not a
/// concern.
///
/// # Usage
/// In order to create an arena, two things are needed:
/// - **A locking mechanism**. You have many choices. Whatever you decide to
///   use, it must implement [`lock::Lock`]. The trait is implemented by
///   [`spin::Mutex<()>`] by default, but that does not mean that you should use
///   [`spin`]'s mutex. [Spinlocks are rarely the right choice for a
///   lock](https://matklad.github.io/2020/01/02/spinlocks-considered-harmful.html).
/// - **An allocator**. This is the type that will be used to allocate the
///   boundary tags used within the allocator. It must implement the
///   [`alloc::Allocator`] trait. With the `nightly` feature, the trait is
///   implemented for all types that implement [`core::alloc::Allocator`].
///
/// Once you have these things, you can move on to [`Arena::create`]:
/// ```rust
/// # #![feature(allocator_api)]
/// # use vmem::{Arena};
/// # use std::alloc::Global as Allocator;
/// # type Lock = std::sync::Mutex<()>;
/// let arena = Arena::<_, Lock>::create("test", 8, None, Allocator).unwrap();
/// ```
/// > #### Why `unwrap`?
/// > The `create` function only returns an error if the specified quantum value
/// > is not a power of two. Since we know that 8 is a power of two, we can
/// > unwrap the value without worrying about any panics.
///
/// ## Filling the arena
/// Without a source, arenas are empty by default. In order to allocate from
/// them, you must first add a span via [`Arena::add_span`]. This function takes
/// a base and a length, and marks it as available for allocation.
///
/// ## Importing from a source
/// A source can be another arena, or any other type that implements [`Source`].
/// In order to add a source to an arena, you must pass it to the parameter
/// `source` during creation. Once a source is specified, allocations will be
/// redirected there (unless the arena has its own free spans available).
pub struct Arena<'label, 'src, A: alloc::Allocator, L: lock::Lock> {
    /// Debugging label for this arena.
    label: &'label str,
    /// Lock guarding all access to `inner`.
    lock: L,
    /// Mutable arena state; only ever accessed while `lock` is held.
    inner: UnsafeCell<ArenaInner>,
    /// Optional backing source to import spans from when the arena is empty.
    source: Option<&'src dyn Source>,
    /// Allocator used to allocate boundary tags.
    allocator: A,
    /// The smallest transactional unit; always a power of two (checked in
    /// [`Arena::create`]).
    quantum: usize,
}
145impl<'label, 'src, A: alloc::Allocator, L: lock::Lock> Arena<'label, 'src, A, L> {
146    /// Create a new empty arena.
147    ///
148    /// # Parameters
149    /// - `label` - a label for the arena. This is used for debugging purposes.
150    /// - `quantum` - the quantum of the arena. It is the smallest transactional
151    ///  unit in the arena (meaning that any allocations within the arena will
152    ///  be rounded up to and aligned to a multiple of it), and must be a power
153    ///  of two.
154    /// - `source` - an optional source for allocations. If specified, any calls
155    ///   to [`alloc`](Arena::alloc) or [`xalloc`](Arena::xalloc) that fail will
156    ///   be forwarded to the source. If not specified, allocations will fail
157    ///   directly if the arena is empty.
158    /// - `allocator` - the allocator to use for allocating boundary tags.
159    ///
160    /// # Returns
161    /// If the quantum is not a power of two, [`Error::InvalidQuantum`] will be
162    /// returned. Otherwise, the function will return an arena
163    pub fn create(
164        label: &'label str,
165        quantum: usize,
166        source: Option<&'src dyn Source>,
167        allocator: A,
168    ) -> error::Result<Self> {
169        if !quantum.is_power_of_two() {
170            return Err(Error::InvalidQuantum);
171        }
172
173        let segment_list = SegmentList::EMPTY;
174        let freelists = Freelists::new();
175        let allocation_table = AllocationTable::new();
176
177        Ok(Self {
178            label,
179            lock: L::default(),
180            inner: UnsafeCell::new(ArenaInner {
181                segment_list,
182                freelists,
183                allocation_table,
184                last_allocated: None,
185                next_fit_offset: None,
186            }),
187            allocator,
188            source,
189            quantum,
190        })
191    }
192
193    /// Add a span to the arena. The span will be used for future allocations.
194    ///
195    /// # Parameters
196    /// - `base` - the base of the span. This is the address of the first byte
197    ///   or index of the span. Do note that it is **not** a multiple of
198    ///   quantum, but the actual value.
199    /// - `len` - the length of the span. This is the number of bytes or indices
200    ///   in the span. Do note that it is **not** a multiple of quantum, but the
201    ///   actual value.
202    ///
203    /// # Returns
204    /// If the span could not be added, one of these errors will be returned:
205    /// - [`Error::AllocatorError`] - the allocator could not allocate a
206    ///   boundary tag to support this span.
207    /// - [`Error::AllocZeroSize`] - the span has no size.
208    /// - [`Error::UnalignedSpan`] - the span is not aligned to `quantum`
209    /// - [`Error::WrappingSpan`] - the span could not be added because it would
210    ///   overflow.
211    pub fn add_span(&self, base: usize, len: usize) -> error::Result<()> {
212        if len == 0 {
213            return Err(Error::AllocZeroSize);
214        }
215        if usize::MAX - base < len {
216            return Err(Error::WrappingSpan);
217        }
218        if base % self.quantum != 0 || len % self.quantum != 0 {
219            return Err(Error::UnalignedSpan);
220        }
221        let guard = self.lock.lock();
222        unsafe { (*self.inner.get()).add_span(base, len, false, &self.allocator)? };
223        drop(guard);
224
225        self.allocator.done();
226
227        Ok(())
228    }
229
230    /// Borrow a span from the source, and add it to the arena.
231    ///
232    /// # Parameters
233    /// - `layout` - the layout of the span to borrow. See [`Layout`] for more
234    ///   details.
235    ///
236    /// # Returns
237    /// If a span could not be borrowed, this function will forward the error
238    /// given by the source. The function could also return one of these errors:
239    /// - [`Error::NoSource`] - no source was specified for this arena.
240    /// - [`Error::AllocatorError`] - the allocator could not allocate a boundary
241    ///   tag to support this span.
242    pub fn import(&self, mut layout: Layout) -> error::Result<()> {
243        layout.align_up_to(self.quantum);
244        layout.set_size(layout.size().next_multiple_of(self.quantum));
245
246        let source = self.source.ok_or(Error::NoSource)?;
247        let base = source.import(layout.clone())?;
248
249        let guard = self.lock.lock();
250        unsafe { (*self.inner.get()).add_span(base, layout.size(), true, &self.allocator)? };
251        drop(guard);
252
253        self.allocator.done();
254
255        Ok(())
256    }
257
258    /// Allocates a block based on a size and an allocation policy.
259    ///
260    /// # Parameters
261    /// - `size` - the size of the block to allocate. This is the number of
262    ///   bytes or indices in the block. Do note that it is **not** a multiple
263    ///   of quantum, but the actual value.
264    /// - `policy` - the allocation policy to use. See [`AllocPolicy`] for more
265    ///   details.
266    ///
267    /// # Returns
268    /// If a block could not be allocated, one of the following errors will be
269    /// returned:
270    /// - [`Error::Empty`] - the arena is empty, and no source was specified to
271    ///   borrow from.
272    /// - [`Error::AllocatorError`] - the allocator could not allocate a
273    ///   boundary tag to support this block.
274    /// - [`Error::AllocZeroSize`] - the requested allocation has no size.
275    ///
276    /// If a source is specified, this function will forward any errors given by
277    /// it.
278    pub fn alloc(&self, size: usize, policy: AllocPolicy) -> error::Result<usize> {
279        if size == 0 {
280            return Err(Error::AllocZeroSize);
281        }
282        let size = size.next_multiple_of(self.quantum);
283        let guard = self.lock.lock();
284        let result = unsafe { (*self.inner.get()).alloc(size, policy, &self.allocator) };
285        drop(guard);
286
287        self.allocator.done();
288
289        result.or_else(|e| {
290            if e == Error::Empty && self.source.is_some() {
291                self.import(Layout::from_size_align(size, self.quantum).unwrap())?;
292                self.alloc(size, policy)
293            } else {
294                Err(e)
295            }
296        })
297    }
298
299    /// Allocates a block based on a layout and an allocation policy. See
300    /// [`Layout`] for more details on what features are supported.
301    ///
302    /// # Parameters
303    /// - `layout` - the layout of the block to allocate.
304    /// - `policy` - the allocation policy to use. See [`AllocPolicy`] for more
305    ///   details.
306    ///
307    /// # Returns
308    /// If a block could not be allocated, one of the following errors will be
309    /// returned:
310    /// - [`Error::Empty`] - the arena is empty, and no source was specified to
311    ///  borrow from.
312    /// - [`Error::AllocatorError`] - the allocator could not allocate a boundary
313    /// tag to support this block.
314    /// - [`Error::AllocZeroSize`] - the requested allocation has no size.
315    ///
316    /// If a source is specified, this function will forward any errors given by
317    /// it.
318    ///
319    /// # Panics
320    /// This function will panic if you attempt to call it with
321    /// [`AllocPolicy::NextFit`]. This policy is purposefully unsupported.
322    pub fn xalloc(&self, mut layout: Layout, policy: AllocPolicy) -> error::Result<usize> {
323        if policy == AllocPolicy::NextFit {
324            // if we panic after the lock is held, we could cause a poison, and
325            // poison is guaranteed to never happen.
326            unimplemented!("next fit constrained allocations are not supported");
327        }
328
329        layout.set_size(layout.size().next_multiple_of(self.quantum));
330        let guard = self.lock.lock();
331        let result = unsafe { (*self.inner.get()).xalloc(layout.clone(), policy, &self.allocator) };
332        drop(guard);
333
334        self.allocator.done();
335
336        result.or_else(|_| {
337            if self.source.is_some() {
338                self.import(layout.clone())?;
339                self.xalloc(layout, policy)
340            } else {
341                Err(Error::Empty)
342            }
343        })
344    }
345
346    /// Free a block of memory.
347    ///
348    /// # Parameters
349    /// - `base` - the base of the block to free. This is the address of the
350    ///  first byte or index of the block. Do note that it is **not** a multiple
351    /// of quantum, but the actual value.
352    ///
353    /// # Returns
354    /// If the block could not be freed, one of the following errors will be
355    /// returned:
356    /// - [`Error::NoSuchAllocation`] - the block was not allocated by this
357    ///   arena, or was already freed.
358    pub fn free(&self, base: usize) -> error::Result<()> {
359        let guard = self.lock.lock();
360
361        let inner = unsafe { &mut *self.inner.get() };
362        let bt = inner.free(base, &self.allocator)?;
363        let to_free = match (
364            unsafe { bt.as_ref() }.link.prev,
365            unsafe { bt.as_ref() }.link.next,
366        ) {
367            (Some(prev), next)
368                if unsafe { prev.as_ref() }.ty == Type::Borrowed
369                    && next
370                        .map(|bt| unsafe { bt.as_ref() }.is_span())
371                        .unwrap_or(true) =>
372            {
373                let base = unsafe { prev.as_ref() }.base;
374                unsafe {
375                    inner.segment_list.remove(prev);
376                    inner.segment_list.remove(bt);
377                }
378
379                unsafe {
380                    self.allocator.deallocate(prev);
381                    self.allocator.deallocate(bt);
382                }
383
384                Some(base)
385            }
386            _ => {
387                let freelist = inner.freelists.list_of_mut(unsafe { bt.as_ref() }.len);
388                unsafe {
389                    freelist.insert(bt);
390                }
391                None
392            }
393        };
394
395        drop(guard);
396
397        self.allocator.done();
398
399        if let Some(base) = to_free {
400            unsafe {
401                self.source
402                    .ok_or(Error::NoSource)?
403                    .release(base, bt.as_ref().len)?;
404            }
405        }
406
407        Ok(())
408    }
409
410    /// Get the total space contained in this arena.
411    ///
412    /// Note that this iterates the entire list, and as such is not incredibly
413    /// performant. This may change in the future.
414    ///
415    /// # Returns
416    /// The total space contained in this arena, in terms of bytes/indices. This
417    /// includes both allocated and free space, including borrowed space.
418    pub fn total_space(&self) -> usize {
419        let guard = self.lock.lock();
420        let inner = unsafe { &*self.inner.get() };
421        let total = unsafe { inner.segment_list.iter() }
422                .filter(|bt| unsafe { bt.as_ref() }.ty == Type::Span || unsafe { bt.as_ref() }.ty == Type::Borrowed)
423                .fold(0, |acc, bt| acc + unsafe { bt.as_ref() }.len);
424        drop(guard);
425        total
426    }
427
428    /// Get the allocated space contained in this arena.
429    ///
430    /// Note that this iterates the entire list, and as such is not incredibly
431    /// performant. This may change in the future.
432    ///
433    /// # Returns
434    /// The allocated space contained in this arena, in terms of bytes/indices.
435    /// This includes both borrowed and non-borrowed allocations.
436    pub fn allocated_space(&self) -> usize {
437        let guard = self.lock.lock();
438        let inner = unsafe { &*self.inner.get() };
439        let allocated = unsafe { inner.segment_list.iter() }
440            .filter(|bt| unsafe { bt.as_ref() }.ty == Type::Allocated)
441            .fold(0, |acc, bt| acc + unsafe { bt.as_ref() }.len);
442        drop(guard);
443        allocated
444    }
445
446    /// Get the free space contained in this arena.
447    ///
448    /// Note that this iterates the entire list, and as such is not incredibly
449    /// performant. This is subject to change in the future.
450    ///
451    /// # Returns
452    /// The free space contained in this arena, in terms of bytes/indices.
453    /// This includes both borrowed and non-borrowed allocations.
454    pub fn free_space(&self) -> usize {
455        let guard = self.lock.lock();
456        let inner = unsafe { &*self.inner.get() };
457        let free = unsafe { inner.segment_list.iter() }
458            .filter(|bt| unsafe { bt.as_ref() }.ty == Type::Free)
459            .fold(0, |acc, bt| acc + unsafe { bt.as_ref() }.len);
460        drop(guard);
461        free
462    }
463
464    /// Get the borrowed space contained in this arena.
465    ///
466    /// Note that this iterates the entire list, and as such is not incredibly
467    /// performant. This is subject to change in the future.
468    ///
469    /// # Returns
470    /// The borrowed space contained in this arena, in terms of bytes/indices.
471    /// This includes both free and allocated borrowed space.
472    pub fn borrowed_space(&self) -> usize {
473        let guard = self.lock.lock();
474        let inner = unsafe { &*self.inner.get() };
475        let borrowed = unsafe { inner.segment_list.iter() }
476            .filter(|bt| unsafe { bt.as_ref() }.ty == Type::Borrowed)
477            .fold(0, |acc, bt| acc + unsafe { bt.as_ref() }.len);
478        drop(guard);
479        borrowed
480    }
481
482    /// Get the label for this arena.
483    pub fn label(&self) -> &'label str {
484        self.label
485    }
486}
impl<'label, 'src, A: alloc::Allocator, L: lock::Lock> Debug for Arena<'label, 'src, A, L> {
    /// Dump the arena's label, every boundary tag in address order, and the
    /// next-fit bookkeeping. The arena lock is held while formatting; if any
    /// `writeln!` fails, `?` returns early and the guard is released by its
    /// destructor.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        writeln!(f, "Arena {} with:", self.label)?;
        let guard = self.lock.lock();

        let inner = unsafe { &*self.inner.get() };
        for bt in unsafe { inner.segment_list.iter() } {
            writeln!(f, "  {:?}", unsafe { bt.as_ref() })?;
        }

        writeln!(
            f,
            "  Last alloc: {:?}",
            inner.last_allocated.map(|v| unsafe { v.as_ref() })
        )?;
        writeln!(
            f,
            "  Next fit offset: {:#x?}",
            inner.next_fit_offset.map(NonZeroUsize::get).unwrap_or(0)
        )?;

        drop(guard);

        Ok(())
    }
}
impl<'label, 'src, A: alloc::Allocator, L: lock::Lock> Drop for Arena<'label, 'src, A, L> {
    /// Tear down the arena: release every borrowed span back to the source
    /// and return every boundary tag to the tag allocator.
    fn drop(&mut self) {
        let guard = self.lock.lock();
        let inner = unsafe { &mut *self.inner.get() };
        for bt in unsafe { inner.segment_list.iter() } {
            if unsafe { bt.as_ref() }.ty == Type::Borrowed {
                // A `Borrowed` tag can only be created through `import`,
                // which fails without a source, so `unwrap` should not fire
                // here.
                unsafe {
                    self.source
                        .unwrap()
                        .release(bt.as_ref().base, bt.as_ref().len)
                        .unwrap();
                }
            }
            // NOTE(review): `bt` is deallocated while the segment-list
            // iterator is still live; this is only sound if the iterator
            // captures the `next` pointer before yielding — confirm against
            // `list::SegmentList::iter`.
            unsafe {
                self.allocator.deallocate(bt);
            }
        }
        drop(guard);
    }
}
// SAFETY: the only interior mutability in `Arena` is `inner` (an
// `UnsafeCell`), and every access to it in this file happens while
// `self.lock` is held, so shared access across threads is serialized.
// NOTE(review): this also relies on `L::lock` actually providing mutual
// exclusion — confirm against the `lock::Lock` contract.
unsafe impl<'label, 'src, A: alloc::Allocator + Sync, L: lock::Lock + Sync> Sync
    for Arena<'label, 'src, A, L>
{
}
// SAFETY: moving the arena moves ownership of all of its state; the raw
// boundary-tag pointers inside `ArenaInner` are only dereferenced through
// the arena itself. NOTE(review): assumes tags handed out by
// `alloc::Allocator` are not tied to the creating thread — confirm.
unsafe impl<'label, 'src, A: alloc::Allocator + Send, L: lock::Lock + Send> Send
    for Arena<'label, 'src, A, L>
{
}
541
/// The lock-protected mutable state of an [`Arena`].
struct ArenaInner {
    /// All boundary tags (span markers and segments) ordered by base.
    segment_list: SegmentList,
    /// Per-size-class lists of free segments.
    freelists: Freelists,
    /// Lookup table from an allocation's base to its boundary tag.
    allocation_table: AllocationTable,
    /// The most recently allocated segment; starting point for
    /// [`AllocPolicy::NextFit`].
    last_allocated: Option<NonNull<Bt>>,
    /// Offset into `last_allocated` used by the next-fit scan; cleared to
    /// `None` on every allocation. NOTE(review): never set to `Some` in this
    /// part of the file — confirm where it is produced.
    next_fit_offset: Option<NonZeroUsize>,
}
549impl ArenaInner {
    /// Insert a new span covering `base..base + len` into the segment list
    /// and freelists.
    ///
    /// Two boundary tags are created: a span marker (`Type::Span`, or
    /// `Type::Borrowed` when `borrowed` is set) and a `Type::Free` segment
    /// covering the whole span, inserted immediately after the marker.
    ///
    /// # Errors
    /// Returns [`Error::AllocatorError`] if either tag cannot be allocated;
    /// in that case nothing is leaked and the arena is unchanged.
    fn add_span(
        &mut self,
        base: usize,
        len: usize,
        borrowed: bool,
        allocator: &impl alloc::Allocator,
    ) -> error::Result<()> {
        let span = allocator.allocate().ok_or(Error::AllocatorError)?;
        let bt = allocator.allocate().ok_or_else(|| {
            unsafe {
                // We don't want to leak memory in the case that the second
                // allocation fails.
                allocator.deallocate(span);
            }
            Error::AllocatorError
        })?;

        unsafe {
            span.as_ptr().write(Bt {
                link: Link::UNLINKED,
                base,
                len,
                special: Link::UNLINKED,
                ty: if borrowed { Type::Borrowed } else { Type::Span },
            })
        }
        unsafe {
            bt.as_ptr().write(Bt {
                link: Link::UNLINKED,
                base,
                len,
                special: Link::UNLINKED,
                ty: Type::Free,
            })
        }

        // Keep the segment list sorted by base: marker first, then the free
        // segment directly after it.
        unsafe {
            let (prev, next) = self.segment_list.insertion_point(base);
            self.segment_list.insert_between(span, prev, next);
            self.segment_list.insert_between(bt, Some(span), next);
        }
        let freelist = self.freelists.list_of_mut(len);
        unsafe {
            freelist.insert(bt);
        }

        Ok(())
    }
598
599    fn split(
600        &mut self,
601        new_size: usize,
602        base: usize,
603        len: usize,
604        mut bt: NonNull<Bt>,
605        allocator: &impl alloc::Allocator,
606    ) -> error::Result<()> {
607        if len == new_size {
608            return Ok(());
609        }
610
611        unsafe { bt.as_mut() }.len = new_size;
612
613        let after_split = allocator.allocate().ok_or(Error::AllocatorError)?;
614        unsafe {
615            after_split.as_ptr().write(Bt {
616                link: Link::UNLINKED,
617                base: base + new_size,
618                len: len - new_size,
619                special: Link::UNLINKED,
620                ty: Type::Free,
621            })
622        }
623        unsafe {
624            self.segment_list.insert_after(after_split, bt);
625        }
626        let freelist = self.freelists.list_of_mut(len - new_size);
627        unsafe {
628            freelist.insert(after_split);
629        }
630
631        Ok(())
632    }
633
    /// Find the first non-empty freelist whose size class can satisfy
    /// `aligned_size`, starting from the class for
    /// `aligned_size.next_power_of_two()` and walking upwards.
    ///
    /// # Errors
    /// Returns [`Error::Empty`] when every candidate class is empty (the
    /// class size overflows `usize`).
    fn non_empty_freelist_for(&mut self, aligned_size: usize) -> error::Result<&mut SpecialList> {
        let mut freelist_n = aligned_size.next_power_of_two();
        let mut freelist = self.freelists.list_for_mut(freelist_n);
        while freelist.is_empty() {
            // Move up to the next power-of-two class; overflow means we ran
            // out of classes to try.
            freelist_n = freelist_n.checked_mul(2).ok_or(Error::Empty)?;
            freelist = self.freelists.list_for_mut(freelist_n);
        }
        // NOTE(review): raw-pointer round trip to detach the returned
        // reference from the loop's re-borrow of `self.freelists` (a classic
        // borrow-checker workaround). Presumably sound because only this one
        // `&mut` escapes — confirm, e.g. under Miri.
        Ok(unsafe { &mut *(freelist as *mut _) })
    }
643
    /// Allocate a block of `size` (already a multiple of the quantum — the
    /// public wrappers round up before calling) using `policy`, returning the
    /// base of the block.
    ///
    /// Selects a free boundary tag, splits off any excess, marks the tag
    /// allocated, and records it in the allocation table.
    fn alloc(
        &mut self,
        size: usize,
        policy: AllocPolicy,
        allocator: &impl alloc::Allocator,
    ) -> error::Result<usize> {
        let mut bt = match policy {
            AllocPolicy::InstantFit => {
                // Take any tag from the first size class guaranteed to fit.
                let freelist = self.non_empty_freelist_for(size)?;

                unsafe { freelist.pop() }.unwrap()
            }
            AllocPolicy::BestFit => {
                // When `size` is not a power of two, the class *below*
                // `next_power_of_two(size)` may still hold tags that fit;
                // scan it for the tightest fit first.
                let bt = if !size.is_power_of_two() {
                    let smaller_freelist = self.freelists.list_of_mut(size);
                    if !smaller_freelist.is_empty() {
                        unsafe { smaller_freelist.iter() }
                            .filter(|bt| unsafe { bt.as_ref() }.len >= size)
                            .min_by_key(|bt| unsafe { bt.as_ref() }.len - size)
                            .map(|bt| unsafe {
                                smaller_freelist.remove(bt);
                                bt
                            })
                    } else {
                        None
                    }
                } else {
                    None
                };
                if let Some(bt) = bt {
                    bt
                } else {
                    // Fall back to the smallest tag in the first class that
                    // guarantees a fit.
                    let freelist = self.non_empty_freelist_for(size)?;
                    let bt = unsafe { freelist.iter() }
                        .min_by_key(|bt| unsafe { bt.as_ref() }.len)
                        .unwrap();
                    unsafe {
                        freelist.remove(bt);
                    }
                    bt
                }
            }
            AllocPolicy::NextFit => {
                // Resume scanning from the most recent allocation; without
                // one, degrade to instant fit.
                if let Some(mut bt) = self.last_allocated {
                    if let Some(offset) = self.next_fit_offset {
                        // NOTE(review): `next_fit_offset` is never set to
                        // `Some` in this part of the file; this branch
                        // presumably splits the previous block at the resume
                        // offset — confirm where the offset is produced.
                        if unsafe { bt.as_ref() }.len - offset.get() >= size {
                            let base = unsafe { bt.as_ref() }.base;
                            let len = unsafe { bt.as_ref() }.len;
                            let freelist = self.freelists.list_of_mut(len);
                            unsafe {
                                freelist.remove(bt);
                            }

                            self.split(offset.get(), base, len, bt, allocator)?;
                        }
                    }
                    // Walk forward to the next free tag that is big enough;
                    // hitting the end of the list wraps around by degrading
                    // to instant fit.
                    bt = match unsafe { bt.as_ref() }.link.next {
                        Some(bt) => bt,
                        None => return self.alloc(size, AllocPolicy::InstantFit, allocator),
                    };
                    while unsafe { bt.as_ref() }.ty != Type::Free
                        || unsafe { bt.as_ref() }.len < size
                    {
                        bt = match unsafe { bt.as_ref() }.link.next {
                            Some(bt) => bt,
                            None => return self.alloc(size, AllocPolicy::InstantFit, allocator),
                        };
                    }

                    let freelist = self.freelists.list_of_mut(unsafe { bt.as_ref() }.len);
                    unsafe {
                        freelist.remove(bt);
                    }

                    bt
                } else {
                    return self.alloc(size, AllocPolicy::InstantFit, allocator);
                }
            }
        };

        let base = unsafe { bt.as_ref() }.base;
        let len = unsafe { bt.as_ref() }.len;

        // Trim the tag down to the requested size; on failure put it back on
        // its freelist so nothing is lost.
        self.split(size, base, len, bt, allocator).map_err(|e| {
            let freelist = self.freelists.list_of_mut(len);
            unsafe {
                freelist.insert(bt);
            }
            e
        })?;

        unsafe { bt.as_mut() }.ty = Type::Allocated;
        unsafe {
            self.allocation_table.insert(bt);
        }

        self.last_allocated = Some(bt);
        self.next_fit_offset = None;

        Ok(base)
    }
746
    /// Allocate a block satisfying `layout` (size already rounded to the
    /// quantum by the caller), returning the aligned base.
    ///
    /// `satisfies_layout` yields the offset into a free tag at which the
    /// layout's constraints are met; the tag is split at that offset, then
    /// trimmed to the layout's size.
    ///
    /// # Panics
    /// `policy` must not be [`AllocPolicy::NextFit`]; the public wrapper
    /// rejects it before calling here.
    fn xalloc(
        &mut self,
        layout: Layout,
        policy: AllocPolicy,
        allocator: &impl alloc::Allocator,
    ) -> error::Result<usize> {
        let (mut bt, offset) = match policy {
            AllocPolicy::InstantFit => {
                // First tag in the guaranteed-fit class that satisfies the
                // layout's constraints.
                let freelist = self.non_empty_freelist_for(layout.size())?;
                let (bt, offset) = unsafe { freelist.iter() }
                    .find_map(|bt| {
                        unsafe { bt.as_ref() }
                            .satisfies_layout(&layout)
                            .map(|offset| (bt, offset))
                    })
                    .ok_or(Error::Empty)?;

                unsafe {
                    freelist.remove(bt);
                }

                (bt, offset)
            }
            AllocPolicy::BestFit => {
                // As in `alloc`: for non-power-of-two sizes, try the class
                // below for the tightest satisfying tag first.
                let bt = if !layout.size().is_power_of_two() {
                    let smaller_freelist = self.freelists.list_of_mut(layout.size());
                    if !smaller_freelist.is_empty() {
                        unsafe { smaller_freelist.iter() }
                            .filter_map(|bt| {
                                unsafe { bt.as_ref() }
                                    .satisfies_layout(&layout)
                                    .map(|offset| (bt, offset))
                            })
                            .min_by_key(|(bt, _)| unsafe { bt.as_ref() }.len - layout.size())
                            .map(|(bt, offset)| unsafe {
                                smaller_freelist.remove(bt);
                                (bt, offset)
                            })
                    } else {
                        None
                    }
                } else {
                    None
                };
                if let Some(bt) = bt {
                    bt
                } else {
                    let freelist = self.non_empty_freelist_for(layout.size())?;
                    let (bt, offset) = unsafe { freelist.iter() }
                        .filter_map(|bt| {
                            unsafe { bt.as_ref() }
                                .satisfies_layout(&layout)
                                .map(|offset| (bt, offset))
                        })
                        .min_by_key(|(bt, _)| unsafe { bt.as_ref() }.len)
                        .ok_or(Error::Empty)?;
                    unsafe {
                        freelist.remove(bt);
                    }
                    (bt, offset)
                }
            }
            AllocPolicy::NextFit => unreachable!(),
        };

        let base = unsafe { bt.as_ref() }.base;
        let len = unsafe { bt.as_ref() }.len;

        if offset != 0 {
            // Split the head off so the allocation starts at the satisfying
            // offset; `bt` becomes the head and is returned to a freelist,
            // and we continue with the tail tag created by `split`.
            self.split(offset, base, len, bt, allocator).map_err(|e| {
                let freelist = self.freelists.list_of_mut(unsafe { bt.as_ref() }.len);
                unsafe {
                    freelist.insert(bt);
                }
                e
            })?;

            let freelist = self.freelists.list_of_mut(unsafe { bt.as_ref() }.len);
            unsafe {
                freelist.insert(bt);
            }

            bt = unsafe { bt.as_ref() }.link.next.unwrap();
        }

        let aligned_base = base + offset;

        let len = unsafe { bt.as_ref() }.len;

        // NOTE(review): when `offset != 0`, `len` here is the tail tag's
        // length (already reduced by `offset`), so passing `len - offset`
        // subtracts the offset twice; it is correct only for `offset == 0`.
        // Confirm intended semantics — this looks like an undersized split.
        self.split(layout.size(), aligned_base, len - offset, bt, allocator)?;

        // NOTE(review): for `offset != 0`, `split` placed the tail tag in the
        // freelist keyed by `len`, so this removal matches; for `offset == 0`
        // the tag was already removed in the policy match above, making this
        // a second removal — confirm `SpecialList::remove` tolerates an
        // unlinked node.
        let freelist = self.freelists.list_of_mut(len);
        unsafe {
            freelist.remove(bt);
        }

        unsafe { bt.as_mut() }.ty = Type::Allocated;
        unsafe {
            self.allocation_table.insert(bt);
        }

        self.last_allocated = Some(bt);
        self.next_fit_offset = None;

        Ok(aligned_base)
    }
853
854    fn free(
855        &mut self,
856        base: usize,
857        allocator: &impl alloc::Allocator,
858    ) -> error::Result<NonNull<Bt>> {
859        let mut bt =
860            unsafe { self.allocation_table.remove(base) }.ok_or(Error::NoSuchAllocation)?;
861        unsafe { bt.as_mut() }.ty = Type::Free;
862
863        match unsafe { bt.as_ref() }.link.prev {
864            Some(mut prev)
865                if unsafe { prev.as_ref() }.ty == Type::Free
866                    && unsafe { prev.as_ref() }.base + unsafe { prev.as_ref() }.len == base =>
867            {
868                let prev_mut = unsafe { prev.as_mut() };
869                let freelist = self.freelists.list_of_mut(prev_mut.len);
870                unsafe {
871                    freelist.remove(prev);
872                }
873                prev_mut.len += unsafe { bt.as_ref() }.len;
874                if self.last_allocated == Some(bt) {
875                    self.last_allocated = Some(prev);
876                }
877                unsafe {
878                    self.segment_list.remove(bt);
879                }
880                unsafe {
881                    allocator.deallocate(bt);
882                }
883                bt = prev;
884            }
885            _ => {}
886        }
887
888        match unsafe { bt.as_ref() }.link.next {
889            Some(next)
890                if unsafe { next.as_ref() }.ty == Type::Free
891                    && base + unsafe { bt.as_ref() }.len == unsafe { next.as_ref() }.base =>
892            {
893                let bt_mut = unsafe { bt.as_mut() };
894                if self.last_allocated == Some(next) {
895                    self.last_allocated = Some(bt);
896                } else if self.last_allocated == Some(bt) && self.next_fit_offset.is_none() {
897                    self.next_fit_offset = NonZeroUsize::new(unsafe { bt.as_ref() }.len);
898                }
899                bt_mut.len += unsafe { next.as_ref() }.len;
900                let freelist = self.freelists.list_of_mut(unsafe { next.as_ref() }.len);
901                unsafe {
902                    freelist.remove(next);
903                }
904                unsafe {
905                    self.segment_list.remove(next);
906                }
907                unsafe {
908                    allocator.deallocate(next);
909                }
910            }
911            _ => {}
912        }
913
914        Ok(bt)
915    }
916}
917
#[cfg(test)]
mod tests {
    use std::alloc::Global;
    use std::sync::Mutex;

    use super::*;

    /// Creates a root arena (no backing source) with a quantum of 8.
    fn create_arena() -> Arena<'static, 'static, Global, Mutex<()>> {
        Arena::<_, Mutex<()>>::create("root", 8, None, Global).unwrap()
    }

    /// Creates an arena that borrows its spans from `source`.
    fn create_arena_source(source: &dyn Source) -> Arena<'static, '_, Global, Mutex<()>> {
        Arena::<_, Mutex<()>>::create("borrower", 8, Some(source), Global).unwrap()
    }

    // Creating an arena with an invalid quantum (42) must be rejected.
    #[test]
    fn invalid_quantum() {
        assert!(matches!(
            Arena::<_, Mutex<()>>::create("test", 42, None, Global),
            Err(Error::InvalidQuantum)
        ));
    }

    // InstantFit hands out blocks until the span is too fragmented/full to
    // satisfy a request, at which point it must report `Error::Empty`.
    #[test]
    fn instant_fit() {
        let my_arena = create_arena();
        my_arena.add_span(0x1000, 0x1000).unwrap();
        println!("{my_arena:?}");
        my_arena.alloc(0x10, AllocPolicy::InstantFit).unwrap();
        println!("{my_arena:?}");
        my_arena.alloc(0x10, AllocPolicy::InstantFit).unwrap();
        println!("{my_arena:?}");
        assert!(matches!(
            my_arena.alloc(0x1000, AllocPolicy::InstantFit),
            Err(Error::Empty)
        ))
    }

    // BestFit must pick the smallest span that satisfies each request.
    #[test]
    fn best_fit() {
        let my_arena = create_arena();
        my_arena.add_span(0x1000, 0x400).unwrap();
        my_arena.add_span(0x1400, 0x100).unwrap();
        my_arena.add_span(0x1500, 0x200).unwrap();
        println!("{my_arena:?}");
        let base = my_arena.alloc(0x10, AllocPolicy::BestFit).unwrap();
        assert_eq!(base, 0x1400);
        println!("{my_arena:?}");
        let base = my_arena.alloc(0x100, AllocPolicy::BestFit).unwrap();
        assert_eq!(base, 0x1500);
    }

    // NextFit must not reuse a just-freed block; it allocates after the
    // last-allocated block instead.
    #[test]
    fn next_fit() {
        let my_arena = create_arena();
        my_arena.add_span(0x1000, 0x1000).unwrap();
        println!("{my_arena:?}");
        let base = my_arena.alloc(0x10, AllocPolicy::NextFit).unwrap();
        println!("{my_arena:?}");
        my_arena.free(base).unwrap();
        println!("{my_arena:?}");
        let new_base = my_arena.alloc(0x10, AllocPolicy::NextFit).unwrap();
        assert_ne!(base, new_base);
    }

    // xalloc with InstantFit must honor the requested alignment, including
    // alignments larger than the size and repeated aligned requests.
    #[test]
    fn xalloc_instant_fit() {
        let my_arena = create_arena();
        my_arena.add_span(0x1000, 0x1000).unwrap();
        println!("{my_arena:?}");
        let base = my_arena
            .xalloc(
                Layout::from_size_align(0x10, 0x10).unwrap(),
                AllocPolicy::InstantFit,
            )
            .unwrap();
        println!("{my_arena:?}");
        assert_eq!(base % 0x10, 0);
        let base = my_arena
            .xalloc(
                Layout::from_size_align(0x10, 0x100).unwrap(),
                AllocPolicy::InstantFit,
            )
            .unwrap();
        println!("{my_arena:?}");
        assert_eq!(base % 0x100, 0);
        let base = my_arena
            .xalloc(
                Layout::from_size_align(0x10, 0x10).unwrap(),
                AllocPolicy::InstantFit,
            )
            .unwrap();
        assert_eq!(base % 0x10, 0);
    }

    // xalloc with BestFit must still pick the smallest span that can satisfy
    // the size *and* alignment of the layout.
    #[test]
    fn xalloc_best_fit() {
        let my_arena = create_arena();
        my_arena.add_span(0x1000, 0x400).unwrap();
        my_arena.add_span(0x1400, 0x100).unwrap();
        my_arena.add_span(0x1500, 0x200).unwrap();
        println!("{my_arena:?}");
        let base = my_arena
            .xalloc(
                Layout::from_size_align(0x10, 0x100).unwrap(),
                AllocPolicy::BestFit,
            )
            .unwrap();
        assert_eq!(base, 0x1400);
        println!("{my_arena:?}");
        let base = my_arena
            .xalloc(
                Layout::from_size_align(0x100, 0x10).unwrap(),
                AllocPolicy::BestFit,
            )
            .unwrap();
        assert_eq!(base, 0x1500);
    }

    // Freeing an allocation must coalesce so the full span is allocatable
    // again afterwards.
    #[test]
    fn free() {
        let my_arena = create_arena();
        my_arena.add_span(0x1000, 0x1000).unwrap();
        println!("{my_arena:?}");
        let base = my_arena.alloc(0x10, AllocPolicy::InstantFit).unwrap();
        println!("{my_arena:?}");
        my_arena.free(base).unwrap();
        println!("{my_arena:?}");
        my_arena.alloc(0x1000, AllocPolicy::InstantFit).unwrap();
    }

    // Arenas backed by a source must import spans from it on demand and
    // return all borrowed space once their allocations are freed.
    #[test]
    fn import() {
        let source = create_arena();
        source.add_span(0x1000, 0x1000).unwrap();
        println!("{source:?}");

        let my_arena = create_arena_source(&source);
        let first_base = my_arena.alloc(0x10, AllocPolicy::InstantFit).unwrap();
        println!("{source:?}");
        println!("{my_arena:?}");

        let my_second_arena = create_arena_source(&source);
        my_second_arena
            .alloc(0x10, AllocPolicy::InstantFit)
            .unwrap();
        println!("{source:?}");
        println!("{my_second_arena:?}");

        let second_base = my_arena
            .xalloc(
                Layout::from_size_align(0x10, 0x100).unwrap(),
                AllocPolicy::InstantFit,
            )
            .unwrap();
        assert_eq!(second_base % 0x100, 0);
        println!("{source:?}");
        println!("{my_arena:?}");

        my_arena.free(first_base).unwrap();
        println!("{source:?}");
        println!("{my_arena:?}");
        my_arena.free(second_base).unwrap();
        println!("{source:?}");
        println!("{my_arena:?}");

        assert_eq!(my_arena.borrowed_space(), 0x0);
    }

    // The space-accounting metrics must stay consistent with each other:
    // allocated + free == total for a root arena with no borrowed space.
    #[test]
    fn metrics() {
        let my_arena = create_arena();
        my_arena.add_span(0x1000, 0x1000).unwrap();
        println!("{my_arena:?}");
        my_arena.alloc(0x10, AllocPolicy::InstantFit).unwrap();
        println!("{my_arena:?}");
        my_arena.alloc(0x10, AllocPolicy::InstantFit).unwrap();
        println!("{my_arena:?}");
        assert_eq!(my_arena.total_space(), 0x1000);
        assert_eq!(my_arena.allocated_space(), 0x20);
        assert_eq!(my_arena.free_space(), 0xfe0);
        assert_eq!(my_arena.borrowed_space(), 0x0);
    }
}