//! ring_alloc/chunk.rs — fixed-size chunk used by the ring allocator.

1use core::{
2    alloc::Layout,
3    cell::Cell,
4    mem::{align_of, size_of},
5    ptr::NonNull,
6    sync::atomic::Ordering,
7};
8
9use allocator_api2::alloc::{AllocError, Allocator};
10
11use crate::{addr, cold, with_addr_mut, ImUsize};
12
/// Header of a single fixed-size allocator chunk.
///
/// The header lives at the start of the chunk's allocation; the remaining
/// `N - size_of::<Self>()` bytes immediately after it are handed out by
/// `allocate`. `#[repr(C)]` keeps the field order/layout stable so the
/// usable memory reliably starts right past the header.
#[repr(C)]
#[derive(Debug)]
pub(crate) struct Chunk<T, const N: usize> {
    // Bump pointer: address of the first free byte in this chunk.
    // `Cell`, not atomic — only the owning side advances it.
    pub cursor: Cell<*mut u8>,
    // Counter starting at the chunk's base address; padding overhead and
    // deallocated block sizes are added to it. The chunk is fully unused
    // exactly when it equals the cursor's address. `T: ImUsize` — presumably
    // plain or atomic usize depending on thread-safety flavor; see crate root.
    pub freed: T,
    // Link to the next chunk in the ring, `None` when there is none.
    pub next: Cell<Option<NonNull<Chunk<T, N>>>>,
}
20
21impl<T, const N: usize> Chunk<T, N>
22where
23    T: ImUsize,
24{
25    const SIZE: usize = N;
26
27    const ALIGNMENT: usize = align_of::<Self>();
28
29    const LAYOUT: Layout = match Layout::from_size_align(Self::SIZE, Self::ALIGNMENT) {
30        Ok(layout) => layout,
31        Err(_) => panic!("Invalid chunk size"),
32    };
33
34    const LAYOUT_IS_VALID: bool = {
35        if Self::SIZE < size_of::<Self>() {
36            panic!("Chunk size is too small");
37        }
38        if Self::ALIGNMENT < align_of::<Self>() {
39            panic!("Chunk alignment is too small");
40        }
41        true
42    };
43
44    pub fn new<'a, A>(alloc: A) -> Result<NonNull<Self>, AllocError>
45    where
46        A: Allocator + 'a,
47    {
48        debug_assert!(Self::LAYOUT_IS_VALID);
49
50        let ptr = alloc.allocate(Self::LAYOUT)?.cast::<Self>();
51        let memory = unsafe { ptr.as_ptr().add(1).cast::<u8>() };
52
53        // Safety: Writing into memory allocated for `Chunk`.
54        unsafe {
55            ptr.as_ptr().write(Chunk {
56                cursor: Cell::new(memory),
57                freed: T::new(addr(memory)),
58                next: Cell::new(None),
59            });
60        }
61
62        Ok(ptr.cast())
63    }
64
65    /// # Safety
66    ///
67    /// `ptr` must be valid pointer to `Self` allocated by `alloc` using same allocator
68    /// or compatible one.
69    pub unsafe fn free<A>(ptr: NonNull<Self>, alloc: A)
70    where
71        A: Allocator,
72    {
73        // Safety: `ptr` is valid pointer to `Self` allocated by `alloc`.
74        unsafe {
75            alloc.deallocate(ptr.cast(), Self::LAYOUT);
76        }
77    }
78
79    fn chunk_addr(&self) -> usize {
80        addr(self as *const Self)
81    }
82
83    fn base_addr(&self) -> usize {
84        self.chunk_addr() + size_of::<Self>()
85    }
86
87    fn end_addr(&self) -> usize {
88        self.chunk_addr() + N
89    }
90
91    // unsafe fn with_addr(&self, addr: usize) -> *mut u8 {
92    //     unsafe { with_addr_mut(self.memory.get().cast(), addr) }
93    // }
94
95    /// Returns pointer to the next chunk in the ring.
96    pub fn next(&self) -> Option<NonNull<Self>> {
97        self.next.get()
98    }
99
100    /// Returns cursor position in the chunk.
101    fn cursor(&self) -> &Cell<*mut u8> {
102        &self.cursor
103    }
104
105    /// Returns free "cursor" position in the chunk.
106    fn freed(&self) -> &T {
107        &self.freed
108    }
109}
110
impl<T, const N: usize> Chunk<T, N>
where
    T: ImUsize,
{
    /// Checks if chunk is unused.
    /// This state can be changed by calling `allocate`.
    ///
    /// If chunk is potentially shared, this method may return `true`
    /// while another thread is allocating from this chunk.
    #[inline(always)]
    pub fn unused(&self) -> bool {
        // Unused <=> every byte the cursor advanced past (payload + padding)
        // has been accounted back into `freed`, i.e. the counter caught up
        // with the cursor's address.
        self.freed().load(Ordering::Acquire) == addr(self.cursor().get())
    }

    /// Resets chunk to unused state.
    /// If there are currently allocated blocks from this chunk,
    /// returns `false`.
    /// Otherwise resets chunk's cursor to the beginning of the chunk
    /// and returns `true`.
    #[inline(always)]
    pub fn reset(&self) -> bool {
        let mut cursor = self.cursor().get();
        // Same "freed caught up with cursor" check as `unused`; the
        // `Acquire` load pairs with the `Release` in `_deallocate`.
        if self.freed().load(Ordering::Acquire) == addr(cursor) {
            // Safety: base_addr is beginning of the chunk memory
            // and cursor is within the chunk memory.
            cursor = unsafe { with_addr_mut(cursor, self.base_addr()) };
            // Rewind `freed` together with the cursor so the invariant
            // `freed == cursor` (fully unused) is restored.
            self.freed().store(addr(cursor), Ordering::Relaxed);
            self.cursor().set(cursor);
            true
        } else {
            cold();
            false
        }
    }

    /// Bump-allocates `layout` from this chunk, or returns `None` when the
    /// remaining space cannot fit it. Alignment padding is immediately added
    /// to `freed` since it will never be deallocated explicitly.
    #[inline(always)]
    fn _allocate(&self, layout: Layout) -> Option<NonNull<u8>> {
        let cursor = self.cursor().get();

        // Round cursor address up to `layout.align()` (power of two);
        // `checked_add` guards against address-space overflow.
        let aligned = addr(cursor).checked_add(layout.align() - 1)? & !(layout.align() - 1);
        let new_cursor = aligned.checked_add(layout.size())?;
        if new_cursor > self.end_addr() {
            return None;
        }

        // Safety: `aligned` is within the chunk.
        let ptr = unsafe { with_addr_mut(cursor, aligned) };

        // Safety: `new_cursor` is within the chunk.
        let new_cursor = unsafe { with_addr_mut(cursor, new_cursor) };
        self.cursor().set(new_cursor);

        // Safety: `freed` is always not greater than `cursor`.
        // So this cannot overflow.
        // Count the alignment padding as already-freed: nobody will ever
        // deallocate those bytes.
        let overhead = aligned - addr(cursor);
        self.freed().fetch_add(overhead, Ordering::Relaxed);

        // Safety: Range from `ptr` to `ptr + layout.size()` is within the chunk.
        Some(unsafe { NonNull::new_unchecked(ptr) })
    }

    /// Allocates `layout` with a hidden `NonNull<Self>` back-pointer stored
    /// right before the returned payload, so `deallocate` can find the
    /// owning chunk from the payload pointer alone.
    #[inline(always)]
    pub fn allocate(&self, chunk_ptr: NonNull<Self>, layout: Layout) -> Option<NonNull<u8>> {
        // `offset` is where the payload starts inside the combined layout.
        let (meta_layout, offset) = Layout::new::<NonNull<Self>>().extend(layout).ok()?;
        let ptr = self._allocate(meta_layout)?;

        // Safety: `ptr` is allocated to contain `usize` followed with memory for `layout`.
        unsafe {
            ptr.as_ptr().cast::<NonNull<Self>>().write(chunk_ptr);
        }

        // Safety: offset for `layout` in `meta_layout` used to calculate `ptr`.
        let ptr = unsafe { ptr.as_ptr().add(offset) };

        // Safety: `ptr` is allocation for `layout`.
        Some(unsafe { NonNull::new_unchecked(ptr) })
    }

    /// Accounts `size` bytes as freed.
    ///
    /// # Safety
    ///
    /// `size` must not exceed the bytes currently outstanding from this
    /// chunk, so `freed` can never run past `cursor`.
    #[inline(always)]
    unsafe fn _deallocate(&self, size: usize) {
        // Safety: `freed` is always less than `cursor - size`.
        // NOTE(review): the `Release` here pairs with the `Acquire` loads in
        // `unused`/`reset` (not `allocate`, which uses `Relaxed`).
        self.freed().fetch_add(size, Ordering::Release);
    }

    /// Frees a pointer previously returned by `allocate` with the same
    /// `layout`, using the back-pointer stashed before the payload to find
    /// the owning chunk.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by `Self::allocate` with this exact
    /// `layout`, and must not have been deallocated already.
    #[inline(always)]
    pub unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
        // Same extension as in `allocate`; it succeeded there for this
        // `layout`, so `unwrap` cannot fail here.
        let (meta_layout, offset) = Layout::new::<NonNull<Self>>().extend(layout).unwrap();

        // Step back over the payload offset to reach the stored back-pointer.
        let meta_ptr = unsafe { ptr.sub(offset) }.cast::<NonNull<Self>>();
        let chunk_ptr = unsafe { *meta_ptr };

        // Safety: chunk is alive since `ptr` is alive.
        let chunk = unsafe { chunk_ptr.as_ref() };
        unsafe {
            // Return the whole combined allocation (meta + padding + payload).
            chunk._deallocate(meta_layout.size());
        }
    }
}