use core::{
    cell::Cell,
    hash::{Hash, Hasher},
    mem::ManuallyDrop,
    ptr::NonNull,
};

use allocator_api2::alloc::{AllocError, Allocator, Layout};

use crate::layout_max;

type Chunk<const N: usize> = crate::chunk::Chunk<Cell<usize>, { N }>;

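// Size classes used by `allocate` and `deallocate` below: a request whose
// `layout_max` value fits within a class maximum is served from that class's
// ring of chunks, while anything above `LARGE_ALLOCATION_MAX_SIZE` goes
// straight to the underlying allocator.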
const TINY_ALLOCATION_MAX_SIZE: usize = 16;

const TINY_ALLOCATION_CHUNK_SIZE: usize = 16384;

const SMALL_ALLOCATION_MAX_SIZE: usize = 256;

const SMALL_ALLOCATION_CHUNK_SIZE: usize = 65536;

const LARGE_ALLOCATION_MAX_SIZE: usize = 65536;

const LARGE_ALLOCATION_CHUNK_SIZE: usize = 2097152;

#[cfg(not(feature = "alloc"))]
macro_rules! ring_alloc {
    ($(#[$meta:meta])* pub struct $ring_alloc:ident;) => {
        $(#[$meta])*
        #[repr(transparent)]
        pub struct $ring_alloc<A: Allocator> {
            inner: NonNull<Rings<A>>,
        }
    };
}

#[cfg(feature = "alloc")]
macro_rules! ring_alloc {
    ($(#[$meta:meta])* pub struct $ring_alloc:ident;) => {
        $(#[$meta])*
        #[repr(transparent)]
        #[must_use]
        pub struct $ring_alloc<A: Allocator = allocator_api2::alloc::Global> {
            inner: NonNull<Rings<A>>,
        }
    };
}

ring_alloc! {
    /// Ring-buffer based allocator.
    ///
    /// Allocations are grouped into size classes, each served from its own
    /// ring of chunks. Cloning is cheap and shares the same rings; the backing
    /// memory is released when the last clone is dropped.
    pub struct RingAlloc;
}

impl<A> Clone for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn clone(&self) -> Self {
        Rings::inc_ref(self.inner);
        RingAlloc { inner: self.inner }
    }

    #[inline(always)]
    fn clone_from(&mut self, source: &Self) {
        // Take a reference to the source's rings first, then release the rings
        // currently held by `self` so they are not leaked (this order also
        // handles `self == source`).
        Rings::inc_ref(source.inner);
        Rings::dec_ref(self.inner);
        self.inner = source.inner;
    }
}

impl<A> PartialEq for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool {
        self.inner == other.inner
    }
}

impl<A> Hash for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.inner.hash(state);
    }
}

impl<A> Drop for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn drop(&mut self) {
        Rings::dec_ref(self.inner);
    }
}

type TinyChunk = Chunk<{ TINY_ALLOCATION_CHUNK_SIZE }>;
type SmallChunk = Chunk<{ SMALL_ALLOCATION_CHUNK_SIZE }>;
type LargeChunk = Chunk<{ LARGE_ALLOCATION_CHUNK_SIZE }>;

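// Each ring is an intrusive singly-linked list of chunks. Allocations are
// carved out of the chunk at `head`; once that chunk is exhausted it is moved
// behind `tail` and the following chunk becomes the new head, being reused
// only if `Chunk::reset` succeeds (see `_allocate` below).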
struct Ring<T> {
    head: Cell<Option<NonNull<T>>>,

    tail: Cell<Option<NonNull<T>>>,
}

impl<T> Ring<T> {
    const fn new() -> Self {
        Ring {
            head: Cell::new(None),
            tail: Cell::new(None),
        }
    }
}

struct Rings<A: Allocator> {
    tiny_ring: Ring<TinyChunk>,
    small_ring: Ring<SmallChunk>,
    large_ring: Ring<LargeChunk>,
    allocator: ManuallyDrop<A>,
    ref_cnt: Cell<usize>,
}

impl<A> Rings<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn try_new_in(allocator: A) -> Result<NonNull<Self>, AllocError> {
        let ptr = allocator.allocate(Layout::new::<Self>())?;
        let inner = Rings {
            tiny_ring: Ring::new(),
            small_ring: Ring::new(),
            large_ring: Ring::new(),
            allocator: ManuallyDrop::new(allocator),
            ref_cnt: Cell::new(1),
        };

        let ptr = ptr.cast::<Self>();

        unsafe {
            core::ptr::write(ptr.as_ptr(), inner);
        }

        Ok(ptr)
    }

    #[inline(always)]
    #[cfg(not(no_global_oom_handling))]
    fn new_in(allocator: A) -> NonNull<Self> {
        match Self::try_new_in(allocator) {
            Ok(ptr) => ptr,
            #[cfg(feature = "alloc")]
            Err(AllocError) => {
                alloc::alloc::handle_alloc_error(Layout::new::<Self>());
            }
            #[cfg(not(feature = "alloc"))]
            Err(AllocError) => {
                core::panic!("Failed to allocate Rings");
            }
        }
    }

    fn inc_ref(ptr: NonNull<Self>) {
        let me = unsafe { ptr.as_ref() };
        me.ref_cnt.set(me.ref_cnt.get() + 1);
    }

    fn dec_ref(ptr: NonNull<Self>) {
        let me = unsafe { ptr.as_ref() };

        debug_assert_ne!(me.ref_cnt.get(), 0);
        let new_ref_cnt = me.ref_cnt.get() - 1;
        me.ref_cnt.set(new_ref_cnt);

        if new_ref_cnt == 0 {
            Self::free(ptr);
        }
    }

    #[cold]
    fn free(ptr: NonNull<Self>) {
        let me = unsafe { ptr.as_ref() };

        me.free_all();

        // Move the allocator out of `ManuallyDrop` before the `Rings` storage
        // it lives in is returned to that same allocator.
        let allocator = unsafe { core::ptr::read(&*me.allocator) };

        unsafe {
            allocator.deallocate(ptr.cast(), Layout::new::<Self>());
        }
    }

    #[inline(always)]
    fn clean_all(&self) {
        Self::clean(&self.tiny_ring, &self.allocator);
        Self::clean(&self.small_ring, &self.allocator);
        Self::clean(&self.large_ring, &self.allocator);
    }

    #[inline(always)]
    fn clean<const N: usize>(ring: &Ring<Chunk<N>>, allocator: &A) {
        let mut chunk = &ring.head;

        // Unlink and free every chunk that reports itself as unused.
        while let Some(c) = chunk.get() {
            if unsafe { c.as_ref().unused() } {
                chunk.set(unsafe { c.as_ref().next() });

                unsafe {
                    Chunk::free(c, allocator);
                }
            } else {
                chunk = unsafe { &c.as_ref().next };
            }
        }

        if ring.head.get().is_none() {
            ring.tail.set(None);
        }
    }

    fn free_all(&self) {
        Self::free_chunks(&self.tiny_ring, &self.allocator);
        Self::free_chunks(&self.small_ring, &self.allocator);
        Self::free_chunks(&self.large_ring, &self.allocator);
    }

    #[inline(always)]
    fn free_chunks<const N: usize>(ring: &Ring<Chunk<N>>, allocator: &A) {
        let mut chunk = ring.head.take();

        while let Some(c) = chunk {
            chunk = unsafe { c.as_ref().next() };
            unsafe {
                Chunk::free(c, allocator);
            }
        }

        ring.tail.set(None);
    }
}

#[cfg(not(no_global_oom_handling))]
#[cfg(feature = "alloc")]
impl RingAlloc {
    /// Creates a new [`RingAlloc`] backed by the global allocator.
    #[inline(always)]
    pub fn new() -> Self {
        RingAlloc {
            inner: Rings::new_in(allocator_api2::alloc::Global),
        }
    }
}

#[cfg(not(no_global_oom_handling))]
impl<A> Default for RingAlloc<A>
where
    A: Allocator + Default,
{
    #[inline(always)]
    fn default() -> Self {
        RingAlloc::new_in(A::default())
    }
}

impl<A> RingAlloc<A>
where
    A: Allocator,
{
    /// Creates a new [`RingAlloc`] backed by the given allocator.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn new_in(allocator: A) -> Self {
        RingAlloc {
            inner: Rings::new_in(allocator),
        }
    }

    /// Creates a new [`RingAlloc`] backed by the given allocator, returning an
    /// error if the initial bookkeeping allocation fails.
    #[inline(always)]
    pub fn try_new_in(allocator: A) -> Result<Self, AllocError> {
        Ok(RingAlloc {
            inner: Rings::try_new_in(allocator)?,
        })
    }

    /// Allocates memory for the given `layout`.
    ///
    /// The request is routed to the tiny, small, or large ring based on its
    /// size class; anything larger goes directly to the underlying allocator.
    #[inline(always)]
    pub fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let inner = unsafe { self.inner.as_ref() };
        if layout_max(layout) <= TINY_ALLOCATION_MAX_SIZE {
            Self::_allocate(&inner.tiny_ring, layout, &inner.allocator)
        } else if layout_max(layout) <= SMALL_ALLOCATION_MAX_SIZE {
            Self::_allocate(&inner.small_ring, layout, &inner.allocator)
        } else if layout_max(layout) <= LARGE_ALLOCATION_MAX_SIZE {
            Self::_allocate(&inner.large_ring, layout, &inner.allocator)
        } else {
            inner.allocator.allocate(layout)
        }
    }

    /// Deallocates memory previously allocated by this allocator.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a block currently allocated by this allocator, and
    /// `layout` must be the same layout that was used to allocate it.
    #[inline(always)]
    pub unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout_max(layout) <= TINY_ALLOCATION_MAX_SIZE {
            unsafe {
                Self::_deallocate::<{ TINY_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else if layout_max(layout) <= SMALL_ALLOCATION_MAX_SIZE {
            unsafe {
                Self::_deallocate::<{ SMALL_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else if layout_max(layout) <= LARGE_ALLOCATION_MAX_SIZE {
            unsafe {
                Self::_deallocate::<{ LARGE_ALLOCATION_CHUNK_SIZE }>(ptr, layout);
            }
        } else {
            let inner = unsafe { self.inner.as_ref() };
            unsafe {
                inner.allocator.deallocate(ptr, layout);
            }
        }
    }

    #[inline(always)]
    fn _allocate<const N: usize>(
        ring: &Ring<Chunk<N>>,
        layout: Layout,
        allocator: &A,
    ) -> Result<NonNull<[u8]>, AllocError> {
        if let Some(chunk_ptr) = ring.head.get() {
            let chunk = unsafe { chunk_ptr.as_ref() };

            match chunk.allocate(chunk_ptr, layout) {
                Some(ptr) => {
                    return Ok(unsafe {
                        NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                            ptr.as_ptr(),
                            layout.size(),
                        ))
                    });
                }
                None => match chunk.next.take() {
                    None => {
                        debug_assert_eq!(ring.tail.get(), ring.head.get());
                    }
                    Some(next_ptr) => {
                        // The head chunk is exhausted: rotate it behind the
                        // tail and promote the next chunk to the head.
                        let tail_chunk = unsafe { ring.tail.get().unwrap().as_ref() };
                        debug_assert_eq!(tail_chunk.next(), None);
                        tail_chunk.next.set(Some(chunk_ptr));
                        ring.tail.set(Some(chunk_ptr));
                        ring.head.set(Some(next_ptr));

                        let next = unsafe { next_ptr.as_ref() };

                        // Reuse the new head only if `reset` succeeds.
                        if next.reset() {
                            if let Some(ptr) = next.allocate(next_ptr, layout) {
                                return Ok(unsafe {
                                    NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                                        ptr.as_ptr(),
                                        layout.size(),
                                    ))
                                });
                            }
                        }
                    }
                },
            }
        } else {
            debug_assert_eq!(ring.tail.get(), None);
        }

        // Fall back to a fresh chunk and make it the new head of the ring.
        let chunk_ptr = Chunk::<N>::new(allocator)?;

        let chunk = unsafe { chunk_ptr.as_ref() };

        let ptr = chunk
            .allocate(chunk_ptr, layout)
            .expect("Failed to allocate from fresh chunk");

        chunk.next.set(ring.head.get());

        if ring.tail.get().is_none() {
            debug_assert_eq!(ring.head.get(), None);

            ring.tail.set(Some(chunk_ptr));
        } else {
            debug_assert!(ring.head.get().is_some());
        }

        ring.head.set(Some(chunk_ptr));

        Ok(unsafe {
            NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                ptr.as_ptr(),
                layout.size(),
            ))
        })
    }

    #[inline(always)]
    unsafe fn _deallocate<const N: usize>(ptr: NonNull<u8>, layout: Layout) {
        unsafe {
            Chunk::<N>::deallocate(ptr.as_ptr(), layout);
        }
    }

    /// Releases chunks that no longer hold live allocations back to the
    /// underlying allocator.
    pub fn flush(&self) {
        let inner = unsafe { self.inner.as_ref() };
        inner.clean_all();
    }
}

unsafe impl<A> Allocator for RingAlloc<A>
where
    A: Allocator,
{
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.allocate(layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { self.deallocate(ptr, layout) }
    }
}
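
// A minimal sanity-check sketch, not part of the original file: it assumes the
// `alloc` feature (so that `RingAlloc::new` is available) and exercises the
// public `allocate`/`deallocate` pair defined above.
#[cfg(all(test, feature = "alloc", not(no_global_oom_handling)))]
mod tests {
    use super::*;

    #[test]
    fn round_trip_small_allocation() {
        let ring = RingAlloc::new();
        let layout = Layout::from_size_align(32, 8).unwrap();

        // 32 bytes with 8-byte alignment falls into the small size class.
        let ptr = ring.allocate(layout).expect("allocation failed");

        unsafe {
            // Write through the returned block, then hand it back with the
            // same layout, as the deallocate contract requires.
            ptr.cast::<u8>().as_ptr().write_bytes(0xAB, layout.size());
            ring.deallocate(ptr.cast(), layout);
        }
    }
}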