1use core::{
2 alloc::Layout,
3 cell::Cell,
4 mem::{align_of, size_of},
5 ptr::NonNull,
6 sync::atomic::Ordering,
7};
8
9use allocator_api2::alloc::{AllocError, Allocator};
10
11use crate::{addr, cold, with_addr_mut, ImUsize};
12
/// Header of one fixed-size (`N`-byte) bump-allocation chunk.
///
/// The header sits at the start of the chunk's storage; the bytes after it
/// (up to `N` total — see `end_addr` below) are handed out by bumping
/// `cursor`. Chunks form an intrusive singly linked list via `next`.
#[repr(C)] // field order is part of the memory layout — do not reorder
#[derive(Debug)]
pub(crate) struct Chunk<T, const N: usize> {
    // Bump pointer: address of the next free byte inside this chunk.
    pub cursor: Cell<*mut u8>,
    // Accounting counter used to detect when everything handed out has been
    // returned (compared against `cursor` in `unused`/`reset`).
    // NOTE(review): `T: ImUsize` looks like an atomic-or-plain usize
    // abstraction (load/store/fetch_add with `Ordering`) — confirm.
    pub freed: T,
    // Link to the next chunk in the list, if any.
    pub next: Cell<Option<NonNull<Chunk<T, N>>>>,
}
20
21impl<T, const N: usize> Chunk<T, N>
22where
23 T: ImUsize,
24{
25 const SIZE: usize = N;
26
27 const ALIGNMENT: usize = align_of::<Self>();
28
29 const LAYOUT: Layout = match Layout::from_size_align(Self::SIZE, Self::ALIGNMENT) {
30 Ok(layout) => layout,
31 Err(_) => panic!("Invalid chunk size"),
32 };
33
34 const LAYOUT_IS_VALID: bool = {
35 if Self::SIZE < size_of::<Self>() {
36 panic!("Chunk size is too small");
37 }
38 if Self::ALIGNMENT < align_of::<Self>() {
39 panic!("Chunk alignment is too small");
40 }
41 true
42 };
43
44 pub fn new<'a, A>(alloc: A) -> Result<NonNull<Self>, AllocError>
45 where
46 A: Allocator + 'a,
47 {
48 debug_assert!(Self::LAYOUT_IS_VALID);
49
50 let ptr = alloc.allocate(Self::LAYOUT)?.cast::<Self>();
51 let memory = unsafe { ptr.as_ptr().add(1).cast::<u8>() };
52
53 unsafe {
55 ptr.as_ptr().write(Chunk {
56 cursor: Cell::new(memory),
57 freed: T::new(addr(memory)),
58 next: Cell::new(None),
59 });
60 }
61
62 Ok(ptr.cast())
63 }
64
65 pub unsafe fn free<A>(ptr: NonNull<Self>, alloc: A)
70 where
71 A: Allocator,
72 {
73 unsafe {
75 alloc.deallocate(ptr.cast(), Self::LAYOUT);
76 }
77 }
78
79 fn chunk_addr(&self) -> usize {
80 addr(self as *const Self)
81 }
82
83 fn base_addr(&self) -> usize {
84 self.chunk_addr() + size_of::<Self>()
85 }
86
87 fn end_addr(&self) -> usize {
88 self.chunk_addr() + N
89 }
90
91 pub fn next(&self) -> Option<NonNull<Self>> {
97 self.next.get()
98 }
99
100 fn cursor(&self) -> &Cell<*mut u8> {
102 &self.cursor
103 }
104
105 fn freed(&self) -> &T {
107 &self.freed
108 }
109}
110
impl<T, const N: usize> Chunk<T, N>
where
    T: ImUsize,
{
    /// `true` when everything this chunk handed out has been returned:
    /// the freed-byte counter has caught back up with the bump cursor.
    ///
    /// The `Acquire` load pairs with the `Release` `fetch_add` in
    /// `_deallocate`, so deallocations observed here are fully visible.
    #[inline(always)]
    pub fn unused(&self) -> bool {
        self.freed().load(Ordering::Acquire) == addr(self.cursor().get())
    }

    /// Rewinds the chunk to its empty state if it is currently unused.
    ///
    /// Returns `true` on success; `false` (cold path) when live
    /// allocations remain.
    #[inline(always)]
    pub fn reset(&self) -> bool {
        let mut cursor = self.cursor().get();
        if self.freed().load(Ordering::Acquire) == addr(cursor) {
            // Rewind the cursor to just past the header; `with_addr_mut`
            // keeps the original pointer's provenance.
            cursor = unsafe { with_addr_mut(cursor, self.base_addr()) };
            // NOTE(review): `Relaxed` here presumably relies on `reset`
            // being called only by the owning (allocating) thread — confirm.
            self.freed().store(addr(cursor), Ordering::Relaxed);
            self.cursor().set(cursor);
            true
        } else {
            cold();
            false
        }
    }

    /// Bump-allocates `layout` from this chunk, or `None` if it does not
    /// fit (or an address computation would overflow).
    #[inline(always)]
    fn _allocate(&self, layout: Layout) -> Option<NonNull<u8>> {
        let cursor = self.cursor().get();

        // Round the cursor address up to `layout.align()`; alignment is a
        // power of two, so add-then-mask is exact. `checked_add` guards
        // against address-space overflow.
        let aligned = addr(cursor).checked_add(layout.align() - 1)? & !(layout.align() - 1);
        let new_cursor = aligned.checked_add(layout.size())?;
        if new_cursor > self.end_addr() {
            return None;
        }

        let ptr = unsafe { with_addr_mut(cursor, aligned) };

        let new_cursor = unsafe { with_addr_mut(cursor, new_cursor) };
        self.cursor().set(new_cursor);

        // Padding skipped for alignment is never handed to a caller, so it
        // is credited to `freed` immediately; together with the size credit
        // in `_deallocate` this keeps `unused()` exact.
        let overhead = aligned - addr(cursor);
        self.freed().fetch_add(overhead, Ordering::Relaxed);

        Some(unsafe { NonNull::new_unchecked(ptr) })
    }

    /// Allocates `layout` preceded by a back-pointer to this chunk, so
    /// `deallocate` can recover the owning chunk from a bare pointer.
    /// Returns the payload pointer (just past the back-pointer header).
    #[inline(always)]
    pub fn allocate(&self, chunk_ptr: NonNull<Self>, layout: Layout) -> Option<NonNull<u8>> {
        // `extend` places the payload after the pointer header with the
        // required alignment; `offset` is the payload's distance from the
        // start of the combined block.
        let (meta_layout, offset) = Layout::new::<NonNull<Self>>().extend(layout).ok()?;
        let ptr = self._allocate(meta_layout)?;

        // SAFETY: `_allocate` returned `meta_layout.size()` bytes aligned
        // to `meta_layout.align()`, which covers `NonNull<Self>`.
        unsafe {
            ptr.as_ptr().cast::<NonNull<Self>>().write(chunk_ptr);
        }

        let ptr = unsafe { ptr.as_ptr().add(offset) };

        Some(unsafe { NonNull::new_unchecked(ptr) })
    }

    /// Credits `size` bytes back to the freed counter. The `Release`
    /// ordering pairs with the `Acquire` loads in `unused`/`reset`.
    #[inline(always)]
    unsafe fn _deallocate(&self, size: usize) {
        self.freed().fetch_add(size, Ordering::Release);
    }

    /// Frees a pointer previously returned by [`Chunk::allocate`].
    ///
    /// # Safety
    /// `ptr` must have been returned by `allocate` with the same `layout`,
    /// and the owning chunk must still be alive.
    #[inline(always)]
    pub unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
        // Recompute the combined layout; `unwrap` cannot fail because the
        // identical `extend` succeeded when this block was allocated.
        let (meta_layout, offset) = Layout::new::<NonNull<Self>>().extend(layout).unwrap();

        // Step back over the payload offset to reach the chunk back-pointer.
        let meta_ptr = unsafe { ptr.sub(offset) }.cast::<NonNull<Self>>();
        let chunk_ptr = unsafe { *meta_ptr };

        let chunk = unsafe { chunk_ptr.as_ref() };
        unsafe {
            chunk._deallocate(meta_layout.size());
        }
    }
}