// basedrop/shared_cell.rs
use crate::{Node, Shared, SharedInner};
use core::marker::PhantomData;
use core::ptr::NonNull;
use core::sync::atomic::{fence, AtomicPtr, AtomicUsize, Ordering};
/// A thread-safe shared mutable memory location that holds a [`Shared<T>`].
///
/// `SharedCell` is designed to be low-overhead for readers at the expense of
/// somewhat higher overhead for writers.
///
/// [`Shared<T>`]: crate::Shared
pub struct SharedCell<T> {
    // Count of threads currently inside `get` taking a reference to the
    // stored node; `replace` spins until this reaches zero before releasing
    // ownership of the old value.
    readers: AtomicUsize,
    // Pointer to the collector node backing the currently stored `Shared<T>`.
    // The cell logically owns one reference count on this node.
    node: AtomicPtr<Node<SharedInner<T>>>,
    // Expresses logical ownership of a `Shared<T>` (for drop-check/variance)
    // without storing one directly.
    phantom: PhantomData<Shared<T>>,
}
// SAFETY: moving a `SharedCell<T>` to another thread moves its owned
// `Shared<T>` with it, and `get` hands out clones that may be used and
// dropped on arbitrary threads, so `T: Send + Sync` is required.
unsafe impl<T: Send + Sync> Send for SharedCell<T> {}
// SAFETY: through `&SharedCell<T>`, any thread can obtain a `Shared<T>`
// (shared access to `T`) via `get`, requiring `T: Sync`; those clones can
// outlive the cell on other threads, requiring `T: Send`.
unsafe impl<T: Send + Sync> Sync for SharedCell<T> {}
impl<T: Send + 'static> SharedCell<T> {
    /// Constructs a new `SharedCell` containing `value`.
    ///
    /// # Examples
    /// ```
    /// use basedrop::{Collector, Shared, SharedCell};
    ///
    /// let collector = Collector::new();
    /// let three = Shared::new(&collector.handle(), 3);
    /// let cell = SharedCell::new(three);
    /// ```
    pub fn new(value: Shared<T>) -> SharedCell<T> {
        // Extract the node pointer, then suppress `value`'s destructor: the
        // reference count it held is transferred into the cell.
        let ptr = value.node.as_ptr();
        core::mem::forget(value);
        SharedCell {
            node: AtomicPtr::new(ptr),
            readers: AtomicUsize::new(0),
            phantom: PhantomData,
        }
    }
}
impl<T> SharedCell<T> {
    /// Gets a copy of the contained [`Shared<T>`], incrementing its reference
    /// count in the process.
    ///
    /// # Examples
    /// ```
    /// use basedrop::{Collector, Shared, SharedCell};
    ///
    /// let collector = Collector::new();
    /// let x = Shared::new(&collector.handle(), 3);
    /// let cell = SharedCell::new(x);
    ///
    /// let y = cell.get();
    /// ```
    ///
    /// [`Shared<T>`]: crate::Shared
    pub fn get(&self) -> Shared<T> {
        // Announce ourselves as an in-flight reader so a concurrent
        // `replace` will wait for us before releasing the old value.
        self.readers.fetch_add(1, Ordering::SeqCst);
        let shared = Shared {
            node: unsafe { NonNull::new_unchecked(self.node.load(Ordering::SeqCst)) },
            phantom: PhantomData,
        };
        // Bump the reference count via `clone`, then forget the temporary so
        // the count owned by the cell itself stays untouched.
        let copy = shared.clone();
        core::mem::forget(shared);
        // Release ordering is required here (was `Relaxed`, which is a bug):
        // it ensures the reference-count increment performed by `clone` above
        // is visible before our decrement of `readers`. With `Relaxed`, the
        // decrement could become visible first, letting a concurrent
        // `replace` observe `readers == 0`, hand the old value back, and have
        // it dropped while we still hold a not-yet-counted pointer to it
        // (use-after-free). Pairs with the `Acquire` fence in `replace`.
        self.readers.fetch_sub(1, Ordering::Release);
        copy
    }
    /// Replaces the contained [`Shared<T>`], decrementing its reference count
    /// in the process.
    ///
    /// # Examples
    /// ```
    /// use basedrop::{Collector, Shared, SharedCell};
    ///
    /// let collector = Collector::new();
    /// let x = Shared::new(&collector.handle(), 3);
    /// let cell = SharedCell::new(x);
    ///
    /// let y = Shared::new(&collector.handle(), 4);
    /// cell.set(y);
    /// ```
    ///
    /// [`Shared<T>`]: crate::Shared
    pub fn set(&self, value: Shared<T>) {
        // Dropping the returned `Shared` releases the cell's reference count
        // on the previously stored value.
        let old = self.replace(value);
        core::mem::drop(old);
    }
    /// Replaces the contained [`Shared<T>`] and returns it.
    ///
    /// # Examples
    /// ```
    /// use basedrop::{Collector, Shared, SharedCell};
    ///
    /// let collector = Collector::new();
    /// let x = Shared::new(&collector.handle(), 3);
    /// let cell = SharedCell::new(x);
    ///
    /// let y = Shared::new(&collector.handle(), 4);
    /// let x = cell.replace(y);
    /// ```
    ///
    /// [`Shared<T>`]: crate::Shared
    pub fn replace(&self, value: Shared<T>) -> Shared<T> {
        // Take over `value`'s reference count and publish its node pointer.
        let node = value.node.as_ptr();
        core::mem::forget(value);
        let old = self.node.swap(node, Ordering::AcqRel);
        // Wait until no reader is mid-`get`: after this point every reader
        // either saw the new pointer or has already finished taking its
        // reference to the old node.
        while self.readers.load(Ordering::Relaxed) != 0 {
            core::hint::spin_loop();
        }
        // Synchronizes with the `Release` decrement in `get` (fence-atomic
        // synchronization), making the readers' refcount increments visible
        // before we hand back ownership of the old node.
        fence(Ordering::Acquire);
        Shared {
            node: unsafe { NonNull::new_unchecked(old) },
            phantom: PhantomData,
        }
    }
    /// Consumes the `SharedCell` and returns the contained [`Shared<T>`]. This
    /// is safe because we are guaranteed to be the only holder of the
    /// `SharedCell`.
    ///
    /// # Examples
    /// ```
    /// use basedrop::{Collector, Shared, SharedCell};
    ///
    /// let collector = Collector::new();
    /// let x = Shared::new(&collector.handle(), 3);
    /// let cell = SharedCell::new(x);
    ///
    /// let x = cell.into_inner();
    /// ```
    ///
    /// [`Shared<T>`]: crate::Shared
    pub fn into_inner(mut self) -> Shared<T> {
        // Null out the stored pointer and forget `self` so the cell's `Drop`
        // impl (which would decrement the reference count) never runs; the
        // count is transferred to the returned `Shared`.
        let node = core::mem::replace(&mut self.node, AtomicPtr::new(core::ptr::null_mut()));
        core::mem::forget(self);
        Shared {
            node: unsafe { NonNull::new_unchecked(node.into_inner()) },
            phantom: PhantomData,
        }
    }
}
impl<T> Drop for SharedCell<T> {
    fn drop(&mut self) {
        // Rebuild the `Shared<T>` this cell owns and let it drop normally so
        // the stored value's reference count is released. Note `into_inner`
        // forgets `self`, so this never sees the null pointer it leaves
        // behind.
        let ptr = self.node.load(Ordering::Relaxed);
        let owned = Shared {
            node: unsafe { NonNull::new_unchecked(ptr) },
            phantom: PhantomData,
        };
        core::mem::drop(owned);
    }
}
#[cfg(test)]
mod tests {
    use crate::{Collector, Shared, SharedCell};
    use core::sync::atomic::{AtomicUsize, Ordering};
    // End-to-end check that a SharedCell keeps its contents alive across
    // collector sweeps and releases them exactly once when the last
    // reference is dropped.
    #[test]
    fn shared_cell() {
        extern crate alloc;
        use alloc::sync::Arc;
        // Payload that counts its own drops via a shared counter.
        struct Test(Arc<AtomicUsize>);
        impl Drop for Test {
            fn drop(&mut self) {
                self.0.fetch_add(1, Ordering::Relaxed);
            }
        }
        let counter = Arc::new(AtomicUsize::new(0));
        let mut collector = Collector::new();
        let shared = Shared::new(&collector.handle(), Test(counter.clone()));
        let cell = SharedCell::new(shared);
        // The cell holds a live reference: collection must not drop the value.
        collector.collect();
        assert_eq!(counter.load(Ordering::Relaxed), 0);
        // Swap the stored Shared with a clone of itself; `copy` is consumed by
        // `replace`, and `copy2` is the reference previously held by the cell.
        let copy = cell.get();
        let copy2 = cell.replace(copy);
        collector.collect();
        assert_eq!(counter.load(Ordering::Relaxed), 0);
        // Dropping the cell releases its count, but `copy2` still keeps the
        // value alive.
        core::mem::drop(cell);
        collector.collect();
        assert_eq!(counter.load(Ordering::Relaxed), 0);
        // Dropping the last reference lets the next collection run the
        // destructor exactly once.
        core::mem::drop(copy2);
        collector.collect();
        assert_eq!(counter.load(Ordering::Relaxed), 1);
    }
}