ring_alloc/local.rs

use core::{
    cell::Cell,
    hash::{Hash, Hasher},
    mem::ManuallyDrop,
    ptr::NonNull,
};

use allocator_api2::alloc::{AllocError, Allocator, Layout};

use crate::layout_max;

type Chunk<const N: usize> = crate::chunk::Chunk<Cell<usize>, { N }>;

/// Allocations up to this number of bytes are allocated in the tiny chunk.
const TINY_ALLOCATION_MAX_SIZE: usize = 16;

/// Size of the chunk for allocations not larger than `TINY_ALLOCATION_MAX_SIZE`.
const TINY_ALLOCATION_CHUNK_SIZE: usize = 16384;

/// Allocations up to this number of bytes are allocated in the small chunk.
const SMALL_ALLOCATION_MAX_SIZE: usize = 256;

/// Size of the chunk for allocations not larger than `SMALL_ALLOCATION_MAX_SIZE`.
const SMALL_ALLOCATION_CHUNK_SIZE: usize = 65536;

/// Allocations up to this number of bytes are allocated in the large chunk.
const LARGE_ALLOCATION_MAX_SIZE: usize = 65536;

/// Size of the chunk for allocations not larger than `LARGE_ALLOCATION_MAX_SIZE`.
const LARGE_ALLOCATION_CHUNK_SIZE: usize = 2097152;

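// Sanity checks on the size classes (illustrative compile-time asserts, not
// in the original source): each chunk must be large enough to hold
// allocations of its class's maximum size.
const _: () = assert!(TINY_ALLOCATION_MAX_SIZE <= TINY_ALLOCATION_CHUNK_SIZE);
const _: () = assert!(SMALL_ALLOCATION_MAX_SIZE <= SMALL_ALLOCATION_CHUNK_SIZE);
const _: () = assert!(LARGE_ALLOCATION_MAX_SIZE <= LARGE_ALLOCATION_CHUNK_SIZE);
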
#[cfg(not(feature = "alloc"))]
macro_rules! ring_alloc {
    ($(#[$meta:meta])* pub struct $ring_alloc:ident;) => {
        $(#[$meta])*
        #[repr(transparent)]
        pub struct $ring_alloc<A: Allocator> {
            inner: NonNull<Rings<A>>,
        }
    };
}

#[cfg(feature = "alloc")]
macro_rules! ring_alloc {
    ($(#[$meta:meta])* pub struct $ring_alloc:ident;) => {
        $(#[$meta])*
        #[repr(transparent)]
        #[must_use]
        pub struct $ring_alloc<A: Allocator = allocator_api2::alloc::Global> {
            inner: NonNull<Rings<A>>,
        }
    };
}

ring_alloc! {
    /// Thread-local ring-allocator.
    ///
    /// This allocator uses an underlying allocator to allocate memory chunks.
    ///
    /// The allocator keeps a ring of chunks and serves allocations from the
    /// front chunk, moving it to the back of the ring when it fills up.
    /// If the next chunk is still occupied by previous allocations, a new
    /// chunk is allocated instead.
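    ///
    /// # Example
    ///
    /// A minimal usage sketch (assumes the `alloc` feature, so `Global` is
    /// the default allocator, and that `RingAlloc` is re-exported at the
    /// crate root):
    ///
    /// ```
    /// use ring_alloc::RingAlloc;
    /// use allocator_api2::vec::Vec;
    ///
    /// let alloc = RingAlloc::new();
    /// let mut v = Vec::new_in(alloc.clone());
    /// v.extend([1, 2, 3]);
    /// assert_eq!(v.as_slice(), &[1, 2, 3]);
    /// ```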
    pub struct RingAlloc;
}

impl<A> Clone for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn clone(&self) -> Self {
        Rings::inc_ref(self.inner);
        RingAlloc { inner: self.inner }
    }

    #[inline(always)]
    fn clone_from(&mut self, source: &Self) {
        // Increment first so this is correct even when `self` and `source`
        // share the same `Rings`, then release the reference previously held
        // by `self` so it is not leaked.
        Rings::inc_ref(source.inner);
        Rings::dec_ref(self.inner);
        self.inner = source.inner;
    }
}

impl<A> PartialEq for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool {
        self.inner == other.inner
    }
}

impl<A> Hash for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.inner.hash(state);
    }
}

impl<A> Drop for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn drop(&mut self) {
        Rings::dec_ref(self.inner);
    }
}

type TinyChunk = Chunk<{ TINY_ALLOCATION_CHUNK_SIZE }>;
type SmallChunk = Chunk<{ SMALL_ALLOCATION_CHUNK_SIZE }>;
type LargeChunk = Chunk<{ LARGE_ALLOCATION_CHUNK_SIZE }>;

struct Ring<T> {
    // Head of the ring: the chunk allocations are currently served from.
    // When the chunk is full, it is moved to the end of the ring.
    head: Cell<Option<NonNull<T>>>,

    // Tail of the ring: the last chunk, whose `next` is always `None`.
    tail: Cell<Option<NonNull<T>>>,
}

impl<T> Ring<T> {
    const fn new() -> Self {
        Ring {
            head: Cell::new(None),
            tail: Cell::new(None),
        }
    }
}
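
// Ring rotation sketch (illustrative comment, not in the original source):
//
//   head -> [A] -> [B] -> [C] (tail, next = None)
//
// When `A` fills up, it is unlinked from the front and appended after `C`;
// `B` becomes the new head. If `B` still holds live allocations when its
// turn comes, a fresh chunk is allocated and becomes the head instead.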

struct Rings<A: Allocator> {
    tiny_ring: Ring<TinyChunk>,
    small_ring: Ring<SmallChunk>,
    large_ring: Ring<LargeChunk>,
    allocator: ManuallyDrop<A>,
    ref_cnt: Cell<usize>,
}

impl<A> Rings<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn try_new_in(allocator: A) -> Result<NonNull<Self>, AllocError> {
        let ptr = allocator.allocate(Layout::new::<Self>())?;
        let inner = Rings {
            tiny_ring: Ring::new(),
            small_ring: Ring::new(),
            large_ring: Ring::new(),
            allocator: ManuallyDrop::new(allocator),
            ref_cnt: Cell::new(1),
        };

        let ptr = ptr.cast::<Self>();

        // Safety: `ptr` is a valid pointer to `Self` allocated by `allocator`.
        unsafe {
            core::ptr::write(ptr.as_ptr(), inner);
        }

        Ok(ptr)
    }

    #[inline(always)]
    #[cfg(not(no_global_oom_handling))]
    fn new_in(allocator: A) -> NonNull<Self> {
        match Self::try_new_in(allocator) {
            Ok(ptr) => ptr,
            #[cfg(feature = "alloc")]
            Err(AllocError) => {
                alloc::alloc::handle_alloc_error(Layout::new::<Self>());
            }
            #[cfg(not(feature = "alloc"))]
            Err(AllocError) => {
                core::panic!("Failed to allocate Rings");
            }
        }
    }

    fn inc_ref(ptr: NonNull<Self>) {
        // Safety: `ptr` is a valid pointer to `Self`.
        let me = unsafe { ptr.as_ref() };
        me.ref_cnt.set(me.ref_cnt.get() + 1);
    }

    fn dec_ref(ptr: NonNull<Self>) {
        // Safety: `ptr` is a valid pointer to `Self`.
        let me = unsafe { ptr.as_ref() };

        debug_assert_ne!(me.ref_cnt.get(), 0);
        let new_ref_cnt = me.ref_cnt.get() - 1;
        me.ref_cnt.set(new_ref_cnt);

        if new_ref_cnt == 0 {
            Self::free(ptr);
        }
    }

    #[cold]
    fn free(ptr: NonNull<Self>) {
        // Safety: `ptr` is a valid pointer to `Self`.
        let me = unsafe { ptr.as_ref() };

        me.free_all();

        // Safety: taking the allocator out of `ManuallyDrop`.
        // The value is dropped immediately after.
        let allocator = unsafe { core::ptr::read(&*me.allocator) };

        // Safety: `ptr` was allocated by `me.allocator`.
        unsafe {
            allocator.deallocate(ptr.cast(), Layout::new::<Self>());
        }
    }

    #[inline(always)]
    fn clean_all(&self) {
        Self::clean(&self.tiny_ring, &self.allocator);
        Self::clean(&self.small_ring, &self.allocator);
        Self::clean(&self.large_ring, &self.allocator);
    }

    #[inline(always)]
    fn clean<const N: usize>(ring: &Ring<Chunk<N>>, allocator: &A) {
        let mut chunk = &ring.head;
        // Last chunk that survives the sweep; becomes the new tail.
        let mut last = None;

        while let Some(c) = chunk.get() {
            // Safety: chunks in the ring are always valid.
            if unsafe { c.as_ref().unused() } {
                // Unlink `c` from the ring.
                // Safety: chunks in the ring are always valid.
                chunk.set(unsafe { c.as_ref().next() });

                // Safety: `c` is a valid pointer to `Chunk` allocated by `allocator`.
                unsafe {
                    Chunk::free(c, allocator);
                }
            } else {
                last = Some(c);
                // Safety: chunks in the ring are always valid.
                chunk = unsafe { &c.as_ref().next };
            }
        }

        // The tail must point at the last surviving chunk (or `None` if the
        // ring is now empty); otherwise it would dangle whenever the old
        // tail chunk was freed above.
        ring.tail.set(last);
    }

    fn free_all(&self) {
        Self::free_chunks(&self.tiny_ring, &self.allocator);
        Self::free_chunks(&self.small_ring, &self.allocator);
        Self::free_chunks(&self.large_ring, &self.allocator);
    }

    #[inline(always)]
    fn free_chunks<const N: usize>(ring: &Ring<Chunk<N>>, allocator: &A) {
        let mut chunk = ring.head.take();

        while let Some(c) = chunk {
            // Safety: chunks in the ring are always valid.
            chunk = unsafe { c.as_ref().next() };
            // Safety: `c` is a valid pointer to `Chunk` allocated by `allocator`.
            unsafe {
                Chunk::free(c, allocator);
            }
        }

        ring.tail.set(None);
    }
}

#[cfg(not(no_global_oom_handling))]
#[cfg(feature = "alloc")]
impl RingAlloc {
    /// Returns a new [`RingAlloc`] that uses the [`Global`] allocator.
    #[inline(always)]
    pub fn new() -> Self {
        RingAlloc {
            inner: Rings::new_in(allocator_api2::alloc::Global),
        }
    }
}

#[cfg(not(no_global_oom_handling))]
impl<A> Default for RingAlloc<A>
where
    A: Allocator + Default,
{
    #[inline(always)]
    fn default() -> Self {
        RingAlloc::new_in(A::default())
    }
}

impl<A> RingAlloc<A>
where
    A: Allocator,
{
    /// Returns a new [`RingAlloc`] that uses the given allocator.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn new_in(allocator: A) -> Self {
        RingAlloc {
            inner: Rings::new_in(allocator),
        }
    }

    /// Attempts to create a new [`RingAlloc`] that uses the given allocator.
    #[inline(always)]
    pub fn try_new_in(allocator: A) -> Result<Self, AllocError> {
        Ok(RingAlloc {
            inner: Rings::try_new_in(allocator)?,
        })
    }

    /// Attempts to allocate a block of memory with this ring-allocator.
    /// Returns a pointer to the beginning of the block if successful.
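    ///
    /// A minimal sketch of direct use (illustrative; callers usually go
    /// through the [`Allocator`] impl instead):
    ///
    /// ```
    /// # use ring_alloc::RingAlloc;
    /// # use allocator_api2::alloc::Layout;
    /// let alloc = RingAlloc::new();
    /// let layout = Layout::from_size_align(64, 8).unwrap();
    /// let ptr = alloc.allocate(layout).unwrap();
    /// // Safety: `ptr` was just allocated from `alloc` with `layout`.
    /// unsafe { alloc.deallocate(ptr.cast(), layout) };
    /// ```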
    #[inline(always)]
    pub fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Safety: `self.inner` is a valid pointer to `Rings`.
        let inner = unsafe { self.inner.as_ref() };
        if layout_max(layout) <= TINY_ALLOCATION_MAX_SIZE {
            Self::_allocate(&inner.tiny_ring, layout, &inner.allocator)
        } else if layout_max(layout) <= SMALL_ALLOCATION_MAX_SIZE {
            Self::_allocate(&inner.small_ring, layout, &inner.allocator)
        } else if layout_max(layout) <= LARGE_ALLOCATION_MAX_SIZE {
            Self::_allocate(&inner.large_ring, layout, &inner.allocator)
        } else {
            // Oversized allocations bypass the rings entirely.
            inner.allocator.allocate(layout)
        }
    }

    /// Deallocates the memory referenced by `ptr`.
    ///
    /// # Safety
    ///
    /// * `ptr` must denote a block of memory [*currently allocated*] via [`RingAlloc::allocate`], and
    /// * `layout` must [*fit*] that block of memory.
    ///
    /// [*currently allocated*]: https://doc.rust-lang.org/std/alloc/trait.Allocator.html#currently-allocated-memory
    /// [*fit*]: https://doc.rust-lang.org/std/alloc/trait.Allocator.html#memory-fitting
    #[inline(always)]
    pub unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout_max(layout) <= TINY_ALLOCATION_MAX_SIZE {
            unsafe {
                Self::_deallocate::<{ TINY_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else if layout_max(layout) <= SMALL_ALLOCATION_MAX_SIZE {
            unsafe {
                Self::_deallocate::<{ SMALL_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else if layout_max(layout) <= LARGE_ALLOCATION_MAX_SIZE {
            unsafe {
                Self::_deallocate::<{ LARGE_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else {
            // Safety: `self.inner` is a valid pointer to `Rings`.
            let inner = unsafe { self.inner.as_ref() };
            // Safety: `ptr` is a valid pointer allocated by `inner.allocator`.
            unsafe {
                inner.allocator.deallocate(ptr, layout);
            }
        }
    }

    #[inline(always)]
    fn _allocate<const N: usize>(
        ring: &Ring<Chunk<N>>,
        layout: Layout,
        allocator: &A,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // Try the head chunk first.
        if let Some(chunk_ptr) = ring.head.get() {
            // Safety: `chunk_ptr` is a valid pointer to `Chunk` allocated by `allocator`.
            let chunk = unsafe { chunk_ptr.as_ref() };

            match chunk.allocate(chunk_ptr, layout) {
                Some(ptr) => {
                    // Safety: `ptr` is a valid pointer into the chunk,
                    // allocated to fit `layout.size()` bytes.
                    return Ok(unsafe {
                        NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                            ptr.as_ptr(),
                            layout.size(),
                        ))
                    });
                }
                // Chunk is full. Try the next one.
                None => match chunk.next.take() {
                    None => {
                        debug_assert_eq!(ring.tail.get(), ring.head.get());
                    }
                    Some(next_ptr) => {
                        // Move the head to the tail and bring the next chunk up as head.

                        // Safety: the tail is a valid pointer to `Chunk` allocated by `allocator`.
                        let tail_chunk = unsafe { ring.tail.get().unwrap().as_ref() };
                        debug_assert_eq!(tail_chunk.next(), None);
                        tail_chunk.next.set(Some(chunk_ptr));
                        ring.tail.set(Some(chunk_ptr));
                        ring.head.set(Some(next_ptr));

                        // Safety: chunks in the ring are always valid.
                        let next = unsafe { next_ptr.as_ref() };

                        if next.reset() {
                            if let Some(ptr) = next.allocate(next_ptr, layout) {
                                // Safety: `ptr` is a valid pointer into the chunk,
                                // allocated to fit `layout.size()` bytes.
                                return Ok(unsafe {
                                    NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                                        ptr.as_ptr(),
                                        layout.size(),
                                    ))
                                });
                            }
                        }

                        // The next chunk is not ready yet. Allocate a new chunk below.
                    }
                },
            }
        } else {
            debug_assert_eq!(ring.tail.get(), None);
        }

        let chunk_ptr = Chunk::<N>::new(allocator)?;

        // Safety: `chunk_ptr` is a valid pointer to `Chunk` allocated by `allocator`.
        let chunk = unsafe { chunk_ptr.as_ref() };

        let ptr = chunk
            .allocate(chunk_ptr, layout)
            .expect("Failed to allocate from fresh chunk");

        // Link the new chunk in front of the current head.
        chunk.next.set(ring.head.get());

        // If this is the first chunk, it is also the tail.
        if ring.tail.get().is_none() {
            debug_assert_eq!(ring.head.get(), None);

            // Modify after asserts.
            ring.tail.set(Some(chunk_ptr));
        } else {
            debug_assert!(ring.head.get().is_some());
        }

        // Modify after asserts.
        ring.head.set(Some(chunk_ptr));

        // Safety: `ptr` is a valid pointer into the chunk,
        // allocated to fit `layout.size()` bytes.
        Ok(unsafe {
            NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                ptr.as_ptr(),
                layout.size(),
            ))
        })
    }

    #[inline(always)]
    unsafe fn _deallocate<const N: usize>(ptr: NonNull<u8>, layout: Layout) {
        // Safety: `ptr` is a valid pointer allocated from a live `Chunk`.
        unsafe {
            Chunk::<N>::deallocate(ptr.as_ptr(), layout);
        }
    }

    /// Frees all unused chunks back to the underlying allocator.
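    ///
    /// A sketch of intended use (illustrative):
    ///
    /// ```
    /// # use ring_alloc::RingAlloc;
    /// let alloc = RingAlloc::new();
    /// // ... allocate and deallocate through `alloc` ...
    /// // Return chunks that no longer hold live allocations to the
    /// // underlying allocator.
    /// alloc.flush();
    /// ```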
    pub fn flush(&self) {
        // Safety: `self.inner` is a valid pointer to `Rings`.
        let inner = unsafe { self.inner.as_ref() };
        inner.clean_all();
    }
}

unsafe impl<A> Allocator for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.allocate(layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Safety: covered by the `Allocator::deallocate` contract.
        unsafe { self.deallocate(ptr, layout) }
    }

    // TODO: Implement grow and shrink.
}
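
// Illustrative test sketch (not in the original source): cloning shares the
// same `Rings` and bumps its reference count; the backing storage is freed
// only when the last clone drops.
#[cfg(all(test, feature = "alloc"))]
mod tests {
    use super::*;

    #[test]
    fn clones_share_rings() {
        let a = RingAlloc::new();
        let b = a.clone();
        assert!(a == b);

        // Dropping one clone keeps the shared `Rings` alive for the other.
        drop(a);

        let layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = b.allocate(layout).unwrap();
        // Safety: `ptr` was just allocated from `b` with `layout`.
        unsafe { b.deallocate(ptr.cast(), layout) };
    }
}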