use core::{
    alloc::Layout, cell::Cell, hint::unreachable_unchecked, ptr::NonNull, sync::atomic::AtomicUsize,
};
use std::thread_local;

use allocator_api2::alloc::{AllocError, Allocator, Global};
use parking_lot::Mutex;

use crate::layout_max;

/// Chunk type shared by all rings. The `AtomicUsize` reference count lets a
/// chunk be released from a different thread than the one it was allocated on.
type Chunk<const N: usize> = crate::chunk::Chunk<AtomicUsize, N>;

/// Allocations whose size and alignment are at most this are served from the tiny ring.
const TINY_ALLOCATION_MAX_SIZE: usize = 16;

/// Size of each chunk in the tiny ring: 16 KiB.
const TINY_ALLOCATION_CHUNK_SIZE: usize = 16384;

/// Allocations whose size and alignment are at most this are served from the small ring.
const SMALL_ALLOCATION_MAX_SIZE: usize = 256;

/// Size of each chunk in the small ring: 64 KiB.
const SMALL_ALLOCATION_CHUNK_SIZE: usize = 65536;

/// Allocations whose size and alignment are at most this are served from the large ring.
/// Anything bigger is forwarded to `Global`.
const LARGE_ALLOCATION_MAX_SIZE: usize = 65536;

/// Size of each chunk in the large ring: 2 MiB.
const LARGE_ALLOCATION_CHUNK_SIZE: usize = 2097152;

type TinyChunk = Chunk<{ TINY_ALLOCATION_CHUNK_SIZE }>;
type SmallChunk = Chunk<{ SMALL_ALLOCATION_CHUNK_SIZE }>;
type LargeChunk = Chunk<{ LARGE_ALLOCATION_CHUNK_SIZE }>;
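
// A worked routing example (assuming `layout_max` returns the larger of
// `layout.size()` and `layout.align()`, as its use in `allocate` below
// implies): `Layout::new::<u64>()` (size 8, align 8) lands in the tiny ring
// of 16 KiB chunks; a 64-byte, cache-line-aligned buffer lands in the small
// ring of 64 KiB chunks; a 4 KiB page lands in the large ring of 2 MiB
// chunks; and a 1 MiB buffer bypasses the rings and goes to `Global`.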

/// Intrusive singly-linked ring of chunks owned by a single thread.
struct LocalRing<T> {
    /// Chunk that allocations are currently served from.
    head: Cell<Option<NonNull<T>>>,

    /// Last chunk of the ring; its `next` pointer is always `None`.
    tail: Cell<Option<NonNull<T>>>,
}

impl<T> LocalRing<T> {
    const fn new() -> Self {
        LocalRing {
            head: Cell::new(None),
            tail: Cell::new(None),
        }
    }
}

/// Intrusive singly-linked ring of chunks shared between threads behind a
/// mutex in [`GlobalRings`].
struct GlobalRing<T> {
    /// First chunk available for adoption by a local ring.
    head: Option<NonNull<T>>,

    /// Last chunk of the ring; its `next` pointer is always `None`.
    tail: Option<NonNull<T>>,
}

impl<T> GlobalRing<T> {
    const fn new() -> Self {
        GlobalRing {
            head: None,
            tail: None,
        }
    }
}

/// Process-wide fallback rings, one per size class. Threads flush their local
/// rings here on exit and adopt chunks from here before allocating new ones.
struct GlobalRings {
    tiny_ring: Mutex<GlobalRing<TinyChunk>>,
    small_ring: Mutex<GlobalRing<SmallChunk>>,
    large_ring: Mutex<GlobalRing<LargeChunk>>,
}

impl Drop for GlobalRings {
    fn drop(&mut self) {
        Self::clean(self.tiny_ring.get_mut());
        Self::clean(self.small_ring.get_mut());
        Self::clean(self.large_ring.get_mut());
    }
}

impl GlobalRings {
    #[inline(always)]
    fn clean_all(&self) {
        Self::clean(&mut self.tiny_ring.lock());
        Self::clean(&mut self.small_ring.lock());
        Self::clean(&mut self.large_ring.lock());
    }

    #[inline(always)]
    fn clean<const N: usize>(ring: &mut GlobalRing<Chunk<N>>) {
        // Last chunk that survives the sweep; becomes the new tail.
        let mut last = None;
        let mut chunk = &mut ring.head;

        while let Some(mut c) = *chunk {
            if unsafe { c.as_ref().unused() } {
                // Unlink the unused chunk, then free it.
                *chunk = unsafe { c.as_mut().next() };

                unsafe {
                    // Safety: the chunk has no live allocations and is unlinked.
                    Chunk::free(c, Global);
                }
            } else {
                last = Some(c);
                chunk = unsafe { c.as_mut().next.get_mut() };
            }
        }

        // Re-point `tail` at the last surviving chunk (or clear it); leaving
        // the old value in place could let it dangle after a free above.
        ring.tail = last;
    }
}

// Safety: chunk pointers in the global rings are only dereferenced while the
// owning mutex is held, and chunk reference counts are atomic.
unsafe impl Send for GlobalRings {}
unsafe impl Sync for GlobalRings {}

/// Per-thread rings, one per size class.
struct LocalRings {
    tiny_ring: LocalRing<TinyChunk>,
    small_ring: LocalRing<SmallChunk>,
    large_ring: LocalRing<LargeChunk>,
}

impl Drop for LocalRings {
    fn drop(&mut self) {
        // Free chunks that are already unused, then hand the remaining ones
        // over to the global rings for reuse by other threads.
        self.clean_all();
        self.flush_all();
    }
}

impl LocalRings {
    #[inline(always)]
    fn clean_all(&self) {
        Self::clean(&self.tiny_ring);
        Self::clean(&self.small_ring);
        Self::clean(&self.large_ring);
    }

    #[inline(always)]
    fn clean<const N: usize>(ring: &LocalRing<Chunk<N>>) {
        // Last chunk that survives the sweep; becomes the new tail.
        let mut last = None;
        let mut chunk = &ring.head;

        while let Some(c) = chunk.get() {
            if unsafe { c.as_ref().unused() } {
                // Unlink the unused chunk, then free it.
                chunk.set(unsafe { c.as_ref().next() });

                unsafe {
                    // Safety: the chunk has no live allocations and is unlinked.
                    Chunk::free(c, Global);
                }
            } else {
                last = Some(c);
                chunk = unsafe { &c.as_ref().next };
            }
        }

        // Re-point `tail` at the last surviving chunk (or clear it); leaving
        // the old value in place could let it dangle after a free above.
        ring.tail.set(last);
    }

    #[inline(always)]
    fn flush_all(&mut self) {
        Self::flush(&mut self.tiny_ring, &GLOBAL_RINGS.tiny_ring);
        Self::flush(&mut self.small_ring, &GLOBAL_RINGS.small_ring);
        Self::flush(&mut self.large_ring, &GLOBAL_RINGS.large_ring);
    }

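    // Shape of the hand-off performed by `flush`: the entire local list is
    // spliced onto the global tail with a single pointer write.
    //
    //   local:   head -> a -> b -> tail
    //   global:  g_head -> ... -> g_tail
    //   after:   g_head -> ... -> g_tail -> head -> a -> b -> tail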
    #[inline(always)]
    fn flush<const N: usize>(ring: &mut LocalRing<Chunk<N>>, global: &Mutex<GlobalRing<Chunk<N>>>) {
        match (ring.head.take(), ring.tail.take()) {
            (None, None) => {}
            (Some(head), Some(tail)) => {
                let mut global = global.lock();

                match (global.head, global.tail) {
                    (None, None) => {
                        global.head = Some(head);
                        global.tail = Some(tail);
                    }
                    (Some(_g_head), Some(mut g_tail)) => unsafe {
                        *g_tail.as_mut().next.get_mut() = Some(head);
                        global.tail = Some(tail);
                    },
                    // `head` and `tail` are always both `Some` or both `None`.
                    _ => unsafe { unreachable_unchecked() },
                }
            }
            _ => unsafe { unreachable_unchecked() },
        }
    }
}

thread_local! {
    /// Per-thread rings; flushed into [`GLOBAL_RINGS`] when the thread exits.
    static LOCAL_RINGS: LocalRings = const { LocalRings {
        tiny_ring: LocalRing::new(),
        small_ring: LocalRing::new(),
        large_ring: LocalRing::new(),
    } };
}

/// Process-wide rings shared by every thread.
static GLOBAL_RINGS: GlobalRings = GlobalRings {
    tiny_ring: Mutex::new(GlobalRing::new()),
    small_ring: Mutex::new(GlobalRing::new()),
    large_ring: Mutex::new(GlobalRing::new()),
};

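/// Thread-aware ring allocator.
///
/// This is a zero-sized handle: every instance shares the same thread-local
/// rings and the same mutex-protected global rings. Requests are routed to the
/// tiny, small, or large size class and bump-allocated from per-thread chunk
/// rings; anything larger than 64 KiB is forwarded to `Global`. Memory
/// allocated on one thread may be freed on another.
///
/// A usage sketch (assuming the type is reachable at this path; adjust the
/// import to wherever the crate actually exports it):
///
/// ```ignore
/// use allocator_api2::vec::Vec;
///
/// let mut v: Vec<u32, OneRingAlloc> = Vec::new_in(OneRingAlloc);
/// for i in 0..100 {
///     v.push(i);
/// }
/// assert_eq!(v.iter().sum::<u32>(), 4950);
/// ```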
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct OneRingAlloc;

#[inline(always)]
fn _allocate<const N: usize>(
    ring: &LocalRing<Chunk<N>>,
    global: &Mutex<GlobalRing<Chunk<N>>>,
    layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
    // Fast path: allocate from the chunk at the head of the local ring.
    if let Some(chunk_ptr) = ring.head.get() {
        let chunk = unsafe { chunk_ptr.as_ref() };

        match chunk.allocate(chunk_ptr, layout) {
            Some(ptr) => {
                return Ok(unsafe {
                    NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                        ptr.as_ptr(),
                        layout.size(),
                    ))
                });
            }
            // The head chunk is exhausted; rotate it to the back of the ring
            // and try its successor.
            None => match chunk.next.take() {
                None => {
                    // Single-chunk ring: nothing to rotate to.
                    debug_assert_eq!(ring.tail.get(), ring.head.get());
                }
                Some(next_ptr) => {
                    // Move the exhausted head chunk behind the current tail.
                    let tail_chunk = unsafe { ring.tail.get().unwrap().as_ref() };
                    debug_assert_eq!(tail_chunk.next(), None);
                    tail_chunk.next.set(Some(chunk_ptr));
                    ring.tail.set(Some(chunk_ptr));
                    ring.head.set(Some(next_ptr));

                    let next = unsafe { next_ptr.as_ref() };

                    // `reset` succeeds only once every allocation in the chunk
                    // has been returned; a recycled chunk can serve the request.
                    if next.reset() {
                        if let Some(ptr) = next.allocate(next_ptr, layout) {
                            return Ok(unsafe {
                                NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                                    ptr.as_ptr(),
                                    layout.size(),
                                ))
                            });
                        }
                    }
                }
            },
        }
    } else {
        debug_assert_eq!(ring.tail.get(), None);
    }

    // Slow path: detach the whole global ring, holding the lock only for the
    // two `take` calls.
    let (g_head, g_tail) = {
        let mut global = global.lock();

        (global.head.take(), global.tail.take())
    };

    let ptr = match (g_head, g_tail) {
        (None, None) => None,
        (Some(g_head), Some(mut g_tail)) => {
            let ptr = unsafe { g_head.as_ref().allocate(g_head, layout) };

            // Splice the adopted chunks in front of the local ring.
            match (ring.head.get(), ring.tail.get()) {
                (None, None) => {
                    ring.head.set(Some(g_head));
                    ring.tail.set(Some(g_tail));
                }
                (Some(head), Some(_tail)) => unsafe {
                    *g_tail.as_mut().next.get_mut() = Some(head);
                    ring.head.set(Some(g_head));
                },
                _ => unsafe { unreachable_unchecked() },
            }

            ptr
        }
        _ => unsafe { unreachable_unchecked() },
    };

    let ptr = match ptr {
        None => {
            // Last resort: allocate a brand-new chunk and make it the head.
            let chunk_ptr = Chunk::<N>::new(Global)?;

            let chunk = unsafe { chunk_ptr.as_ref() };

            // A fresh chunk of size `N` always satisfies a layout in this
            // size class.
            let ptr = chunk
                .allocate(chunk_ptr, layout)
                .expect("Failed to allocate from fresh chunk");

            chunk.next.set(ring.head.get());

            if ring.tail.get().is_none() {
                debug_assert_eq!(ring.head.get(), None);

                ring.tail.set(Some(chunk_ptr));
            } else {
                debug_assert!(ring.head.get().is_some());
            }

            ring.head.set(Some(chunk_ptr));

            ptr
        }
        Some(ptr) => ptr,
    };

    Ok(unsafe {
        NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
            ptr.as_ptr(),
            layout.size(),
        ))
    })
}

#[inline(always)]
unsafe fn _deallocate<const N: usize>(ptr: NonNull<u8>, layout: Layout) {
    unsafe {
        // Deallocation goes straight to the chunk machinery; no ring or
        // thread-local state is consulted.
        Chunk::<N>::deallocate(ptr.as_ptr(), layout);
    }
}

impl OneRingAlloc {
    /// Allocates memory for the given `layout`, dispatching to the tiny,
    /// small, or large ring by size class, or to `Global` for oversized
    /// layouts.
    #[inline(always)]
    pub fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        if layout_max(layout) <= TINY_ALLOCATION_MAX_SIZE {
            LOCAL_RINGS
                .try_with(|rings| _allocate(&rings.tiny_ring, &GLOBAL_RINGS.tiny_ring, layout))
                .unwrap_or(Err(AllocError))
        } else if layout_max(layout) <= SMALL_ALLOCATION_MAX_SIZE {
            LOCAL_RINGS
                .try_with(|rings| _allocate(&rings.small_ring, &GLOBAL_RINGS.small_ring, layout))
                .unwrap_or(Err(AllocError))
        } else if layout_max(layout) <= LARGE_ALLOCATION_MAX_SIZE {
            LOCAL_RINGS
                .try_with(|rings| _allocate(&rings.large_ring, &GLOBAL_RINGS.large_ring, layout))
                .unwrap_or(Err(AllocError))
        } else {
            Global.allocate(layout)
        }
    }

    /// Deallocates memory previously allocated by [`OneRingAlloc::allocate`].
    ///
    /// # Safety
    ///
    /// `ptr` must denote a block currently allocated by this allocator, and
    /// `layout` must be the same layout that was used to allocate it.
    #[inline(always)]
    pub unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout_max(layout) <= TINY_ALLOCATION_MAX_SIZE {
            unsafe {
                _deallocate::<{ TINY_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else if layout_max(layout) <= SMALL_ALLOCATION_MAX_SIZE {
            unsafe {
                _deallocate::<{ SMALL_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else if layout_max(layout) <= LARGE_ALLOCATION_MAX_SIZE {
            unsafe {
                _deallocate::<{ LARGE_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else {
            unsafe { Global.deallocate(ptr, layout) }
        }
    }

    /// Frees every unused chunk in the global rings.
    ///
    /// Chunks are normally recycled rather than freed; call this to return
    /// memory held by the global rings to the underlying allocator.
    pub fn clean_global(&self) {
        GLOBAL_RINGS.clean_all();
    }

    /// Frees every unused chunk in the current thread's rings.
    ///
    /// Panics if the thread-local storage has already been destroyed.
    pub fn clean_local(&self) {
        LOCAL_RINGS.with(|rings| rings.clean_all());
    }
}

unsafe impl Allocator for OneRingAlloc {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Inherent methods take precedence over trait methods, so these calls
        // resolve to the implementations above.
        self.allocate(layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe {
            self.deallocate(ptr, layout);
        }
    }
}
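
// A minimal test sketch, not part of the original module: it exercises only
// the public inherent API and assumes nothing about chunk internals.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn roundtrip_all_size_classes() {
        // One representative layout per size class (tiny, small, large).
        for size in [8usize, 128, 4096] {
            let layout = Layout::from_size_align(size, 8).unwrap();
            let ptr = OneRingAlloc.allocate(layout).expect("allocation failed");
            unsafe {
                // Write to the block to make sure it is actually usable.
                ptr.cast::<u8>().as_ptr().write_bytes(0xAB, layout.size());
                OneRingAlloc.deallocate(ptr.cast(), layout);
            }
        }
    }

    #[test]
    fn clean_runs_without_panicking() {
        OneRingAlloc.clean_local();
        OneRingAlloc.clean_global();
    }
}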