blink_alloc/
cache.rs

use core::{
    cell::UnsafeCell,
    mem::{replace, ManuallyDrop, MaybeUninit},
    sync::atomic::{AtomicUsize, Ordering},
};

use alloc::vec::Vec;

use allocator_api2::alloc::{Allocator, Global};
use parking_lot::RwLock;

use crate::local::BlinkAlloc;

struct Inner<A: Allocator> {
    /// Array of [`BlinkAlloc`] instances ready to pop.
    pop_array: Vec<UnsafeCell<ManuallyDrop<BlinkAlloc<A>>>>,

    /// Index of the next [`BlinkAlloc`] instance to pop.
    next_pop: AtomicUsize,

    /// Array of slots into which released [`BlinkAlloc`] instances are pushed.
    push_array: Vec<UnsafeCell<MaybeUninit<BlinkAlloc<A>>>>,

    /// Index of the slot into which the next [`BlinkAlloc`] instance is pushed.
    next_push: AtomicUsize,
}

// Safety: every slot in `pop_array` and `push_array` is accessed either
// through a unique index handed out by the atomic cursors while the shared
// lock is held, or under the exclusive lock. `BlinkAlloc<A>: Send` is
// required because instances move between threads through the cells.
unsafe impl<A> Sync for Inner<A>
where
    A: Allocator,
    BlinkAlloc<A>: Send,
{
}

/// Multi-threaded cache for [`BlinkAlloc`] instances.
/// Stores pushed [`BlinkAlloc`] instances and returns them on pop.
/// Blink-allocators are kept warm in the cache.
///
/// This type is internally synchronized with a hybrid
/// blocking + wait-free algorithm: pushes and pops are wait-free while
/// slots are available, and fall back to a blocking flush under an
/// exclusive lock otherwise.
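///
/// # Example
///
/// A minimal usage sketch, assuming [`BlinkAlloc::new`] for construction
/// and that both types are importable from the crate root:
///
/// ```
/// # use blink_alloc::{BlinkAlloc, BlinkAllocCache};
/// let cache: BlinkAllocCache = BlinkAllocCache::new();
///
/// // Take a warm allocator from the cache, or create a fresh one.
/// let blink = cache.pop().unwrap_or_else(BlinkAlloc::new);
///
/// // ... use `blink` for allocations, then return it to keep it warm.
/// cache.push(blink);
/// ```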
pub struct BlinkAllocCache<A: Allocator = Global> {
    inner: RwLock<Inner<A>>,
}

impl<A> Default for BlinkAllocCache<A>
where
    A: Allocator,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<A> BlinkAllocCache<A>
where
    A: Allocator,
{
    /// Creates a new empty [`BlinkAllocCache`].
    pub const fn new() -> Self {
        BlinkAllocCache {
            inner: RwLock::new(Inner {
                pop_array: Vec::new(),
                next_pop: AtomicUsize::new(0),
                push_array: Vec::new(),
                next_push: AtomicUsize::new(0),
            }),
        }
    }

    /// Acquires a [`BlinkAlloc`] instance from the cache.
    /// Returns `None` if the cache is empty.
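    ///
    /// A small sketch of the empty-cache behavior (the import path is
    /// assumed):
    ///
    /// ```
    /// # use blink_alloc::BlinkAllocCache;
    /// let cache: BlinkAllocCache = BlinkAllocCache::new();
    /// assert!(cache.pop().is_none());
    /// ```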
    pub fn pop(&self) -> Option<BlinkAlloc<A>> {
        let inner = self.inner.read();

        if !inner.pop_array.is_empty() {
            // Acquire an exclusive index to pop.
            let idx = inner.next_pop.fetch_add(1, Ordering::Acquire);

            if idx < inner.pop_array.len() {
                // Pop the [`BlinkAlloc`] instance.

                // Safety: Acquired exclusive index to this instance.
                let blink = unsafe { ManuallyDrop::take(&mut *inner.pop_array[idx].get()) };

                return Some(blink);
            }

            prevent_overflow(&inner.next_pop, idx, inner.pop_array.len());
        }

        // Nothing was pushed since the last flush, so there is nothing to gain
        // from taking the exclusive lock.
        if inner.next_push.load(Ordering::Relaxed) == 0 {
            return None;
        }

        // Slow path: take the exclusive lock and flush pushed instances
        // into the pop array.
        drop(inner);
        let mut inner = self.inner.write();

        Self::flush(&mut inner);

        inner
            .pop_array
            .pop()
            .map(|cell| ManuallyDrop::into_inner(cell.into_inner()))
    }

    /// Returns a [`BlinkAlloc`] instance to the cache,
    /// keeping it warm for future [`BlinkAllocCache::pop`] calls.
    pub fn push(&self, blink: BlinkAlloc<A>) {
        let inner = self.inner.read();

        if !inner.push_array.is_empty() {
            // Acquire an exclusive index to push.
            let idx = inner.next_push.fetch_add(1, Ordering::Acquire);

            if idx < inner.push_array.len() {
                // Push the [`BlinkAlloc`] instance.

                // Safety: Acquired exclusive index to this slot.
                MaybeUninit::write(unsafe { &mut *inner.push_array[idx].get() }, blink);
                return;
            }

            prevent_overflow(&inner.next_push, idx, inner.push_array.len());
        }

        // Slow path: take the exclusive lock, flush pushed instances and
        // append directly to the pop array.
        drop(inner);
        let mut inner = self.inner.write();

        Self::flush(&mut inner);

        inner
            .pop_array
            .push(UnsafeCell::new(ManuallyDrop::new(blink)));
    }

    /// Moves all fully pushed instances from `push_array` into `pop_array`,
    /// replacing the prefix that was already popped, and resets both cursors.
    /// Takes `&mut Inner`, so the exclusive lock must be held.
    fn flush(inner: &mut Inner<A>) {
        // Clamp the cursors: failed wait-free attempts may have bumped them
        // past the array lengths.
        let pushed = replace(inner.next_push.get_mut(), 0).min(inner.push_array.len());
        let popped = replace(inner.next_pop.get_mut(), 0).min(inner.pop_array.len());

        let pushed_iter = inner.push_array.drain(..pushed);

        inner.pop_array.splice(
            ..popped,
            pushed_iter.map(|cell| {
                // rustc, please, optimize this call to a no-op.

                // Safety: `next_push` equals the number of elements for which push started.
                // Exclusive lock ensures initialization finished.
                UnsafeCell::new(ManuallyDrop::new(unsafe {
                    cell.into_inner().assume_init()
                }))
            }),
        );
    }
}

/// Clamps `atomic` back to `upper` once it grows dangerously large, so that
/// repeated `fetch_add` calls on a full or empty array cannot overflow it.
fn prevent_overflow(atomic: &AtomicUsize, current: usize, upper: usize) {
    #[cold]
    fn cold_store(atomic: &AtomicUsize, upper: usize) {
        atomic.store(upper, Ordering::Relaxed);
    }

    if current >= isize::MAX as usize {
        cold_store(atomic, upper);
    }
}
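
// A minimal smoke-test sketch: it assumes `std` is available under
// `cfg(test)` and that `BlinkAlloc::new()` constructs a fresh allocator.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn push_then_pop_returns_instance() {
        let cache: BlinkAllocCache = BlinkAllocCache::new();

        // An empty cache has nothing to pop.
        assert!(cache.pop().is_none());

        // A pushed instance comes back on the next pop,
        // after which the cache is empty again.
        cache.push(BlinkAlloc::new());
        assert!(cache.pop().is_some());
        assert!(cache.pop().is_none());
    }
}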