use crate::raw::{self, membarrier, Thread};
use crate::{LocalGuard, OwnedGuard};
use std::fmt;
use std::sync::OnceLock;
/// A collector for deferred reclamation of retired pointers.
///
/// Entry points: [`Collector::enter`] / [`Collector::enter_owned`] produce
/// guards, and [`Collector::retire`] schedules a pointer for reclamation.
///
/// This is a thin wrapper around [`raw::Collector`]. The
/// `#[repr(transparent)]` attribute guarantees the two types have identical
/// layout, which `Collector::from_raw` relies on for its reference cast.
#[repr(transparent)]
pub struct Collector {
    // The underlying collector implementation, exposed crate-wide.
    pub(crate) raw: raw::Collector,
}
impl Default for Collector {
fn default() -> Self {
Self::new()
}
}
impl Collector {
    /// Minimum batch size: `new` never configures a batch smaller than this,
    /// even on machines reporting fewer CPUs.
    const DEFAULT_BATCH_SIZE: usize = 32;

    /// Creates a new collector with a batch size of
    /// `max(available_parallelism, 32)`.
    pub fn new() -> Self {
        // Probe membarrier support before the collector is used.
        // NOTE(review): presumably one-time OS feature detection — confirm in
        // `raw::membarrier`.
        membarrier::detect();

        // Query the CPU count once per process and cache it; querying may
        // involve a syscall, and every collector uses the same value.
        static CPUS: OnceLock<usize> = OnceLock::new();
        let cpus = *CPUS.get_or_init(|| {
            std::thread::available_parallelism()
                // `available_parallelism` yields `NonZeroUsize`; widen to `usize`.
                .map(Into::into)
                // Fall back to a single CPU if the count cannot be determined.
                .unwrap_or(1)
        });

        // Scale the batch size with the CPU count, but never below the default.
        let batch_size = cpus.max(Self::DEFAULT_BATCH_SIZE);
        Self {
            raw: raw::Collector::new(cpus, batch_size),
        }
    }

    /// Sets the batch size (builder-style: consumes and returns `self`).
    ///
    /// NOTE(review): no validation is performed here; whether `0` is a legal
    /// batch size depends on `raw::Collector` — confirm there.
    pub fn batch_size(mut self, batch_size: usize) -> Self {
        self.raw.batch_size = batch_size;
        self
    }

    /// Returns a guard bound to this collector for the current thread; see
    /// [`LocalGuard`].
    #[inline]
    pub fn enter(&self) -> LocalGuard<'_> {
        LocalGuard::enter(self)
    }

    /// Like [`Collector::enter`], but returns an [`OwnedGuard`].
    #[inline]
    pub fn enter_owned(&self) -> OwnedGuard<'_> {
        OwnedGuard::enter(self)
    }

    /// Retires `ptr`, handing it to the collector so that `reclaim` can be
    /// invoked on it later.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and valid for whatever access `reclaim`
    /// performs, and must not be retired more than once.
    /// NOTE(review): the full contract lives in `raw::Collector::add` —
    /// confirm the exact requirements there.
    #[inline]
    pub unsafe fn retire<T>(&self, ptr: *mut T, reclaim: unsafe fn(*mut T, &Collector)) {
        // Retiring null is almost certainly a caller bug; catch it in debug builds.
        debug_assert!(!ptr.is_null(), "attempted to retire a null pointer");
        // SAFETY: forwarded from the caller's contract documented above.
        unsafe { self.raw.add(ptr, reclaim, Thread::current()) }
    }

    /// Immediately reclaims everything that has been retired.
    ///
    /// # Safety
    ///
    /// NOTE(review): preconditions are defined by `raw::Collector::reclaim_all`
    /// (presumably: no active guards and no still-reachable retired pointers)
    /// — confirm there.
    pub unsafe fn reclaim_all(&self) {
        // SAFETY: caller upholds the contract of `raw::Collector::reclaim_all`.
        unsafe { self.raw.reclaim_all() };
    }

    /// Casts a `&raw::Collector` back to the public `&Collector` wrapper.
    pub(crate) fn from_raw(raw: &raw::Collector) -> &Collector {
        // SAFETY: `Collector` is `#[repr(transparent)]` over `raw::Collector`,
        // so the two reference types have identical layout and validity.
        unsafe { &*(raw as *const raw::Collector as *const Collector) }
    }
}
// Marker impl: collectors are compared by id (see `PartialEq` in this file),
// and that comparison is a full equivalence relation.
impl Eq for Collector {}
impl PartialEq for Collector {
    /// Two collectors are equal when they wrap the same underlying id.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        let lhs = &self.raw.id;
        let rhs = &other.raw.id;
        lhs == rhs
    }
}
impl fmt::Debug for Collector {
    /// Formats the collector, exposing only its configured batch size.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("Collector");
        dbg.field("batch_size", &self.raw.batch_size);
        dbg.finish()
    }
}