//! A concurrent container for values whose destruction is deferred: values
//! placed in a [`Bin`] are kept alive until the bin is cleared or dropped.
#![warn(
    clippy::pedantic,
    rust_2018_idioms,
    missing_docs,
    unused_qualifications,
    unsafe_op_in_unsafe_fn
)]

use std::sync::atomic;
use std::sync::atomic::AtomicBool;
use try_rwlock::TryRwLock;
mod concurrent_list;
use concurrent_list::ConcurrentList;
mod concurrent_slice;
use concurrent_slice::ConcurrentSlice;
mod concurrent_vec;
use concurrent_vec::ConcurrentVec;
mod inner;
use inner::Inner;
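
/// A concurrent bin that stores values so that their destruction can be
/// deferred: values added with [`Bin::add`] are kept alive until
/// [`Bin::clear`] is called or the bin itself is dropped.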
#[derive(Debug, Default)]
pub struct Bin<'a> {
    inner: TryRwLock<Inner<'a>>,
    clear: AtomicBool,
}
impl<'a> Bin<'a> {
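    /// Creates a new, empty bin.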
    #[must_use]
    pub const fn new() -> Self {
        Self {
            inner: TryRwLock::new(Inner::new()),
            clear: AtomicBool::new(false),
        }
    }

    /// Adds a value to the bin, deferring its destruction until the bin is
    /// cleared or dropped.
    pub fn add<T: Send + 'a>(&self, value: T) {
        if let Some(inner) = self.inner.try_read() {
            inner.add(value);
        } else {
            // The bin is currently being cleared, so the value can be dropped
            // right away instead of being stored.
            drop(value);
        }
        self.try_clear();
    }
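
    /// Clears the bin, dropping every value that has been added to it.
    ///
    /// If another thread is currently reading from the bin, the clear is
    /// recorded and carried out by a later call to `add` or `clear`.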
    pub fn clear(&self) {
        self.clear.store(true, atomic::Ordering::Relaxed);
        self.try_clear();
    }
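
    // If a clear has been requested and the write lock can be taken, reset the
    // flag and drop the bin's contents. If the lock is contended, the flag
    // stays set so that a later call retries.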
    fn try_clear(&self) {
        if self.clear.load(atomic::Ordering::Relaxed) {
            if let Some(mut inner) = self.inner.try_write() {
                self.clear.store(false, atomic::Ordering::Relaxed);
                inner.clear();
            }
        }
    }
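
    /// Returns the current size of the bin, as reported by its inner storage.
    ///
    /// If the bin is locked because it is being cleared, `0` is returned.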
    #[must_use]
    pub fn size(&self) -> usize {
        self.inner.try_read().map_or(0, |inner| inner.size())
    }
}
impl<'a> Drop for Bin<'a> {
    fn drop(&mut self) {
        // `&mut self` guarantees exclusive access, so the contents can be
        // cleared without having to acquire the lock.
        self.inner.get_mut().clear();
    }
}
#[cfg(test)]
mod tests {
    use crate::test_util::assert_thread_safe;
    use crate::test_util::CallOnDrop;
    use crate::Bin;
    use std::sync::atomic::AtomicBool;
    use std::sync::atomic::Ordering::SeqCst;
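
    // A value added to the bin must be dropped exactly once, when `clear` is
    // called.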
    #[test]
    fn clear() {
        let destructor_called = AtomicBool::new(false);
        let bin = Bin::new();
        bin.add(CallOnDrop(
            || assert!(!destructor_called.swap(true, SeqCst)),
        ));
        assert!(!destructor_called.load(SeqCst));
        bin.clear();
        assert!(destructor_called.load(SeqCst));
    }
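
    // `Bin` must be `Send + Sync` regardless of the lifetime of its contents.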
    #[test]
    #[allow(clippy::extra_unused_lifetimes)]
    fn thread_safe<'a>() {
        assert_thread_safe::<Bin<'a>>();
    }
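
    // A small usage sketch, added for illustration and not part of the
    // original suite: every value added to the bin should be dropped once
    // `clear` is called, including when several values are stored.
    #[test]
    fn clear_drops_all_values() {
        use std::sync::atomic::AtomicUsize;

        let drop_count = AtomicUsize::new(0);
        let bin = Bin::new();
        for _ in 0..3 {
            bin.add(CallOnDrop(|| {
                drop_count.fetch_add(1, SeqCst);
            }));
        }
        assert_eq!(drop_count.load(SeqCst), 0);
        bin.clear();
        assert_eq!(drop_count.load(SeqCst), 3);
    }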
}
#[cfg(test)]
mod test_util {
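    // Compiles only when `T: Send + Sync`, so calling it checks thread safety
    // at compile time; the function body does nothing at runtime.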
    pub(crate) fn assert_thread_safe<T: Send + Sync>() {}
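
    // Wraps a closure and calls it when the wrapper is dropped.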
    pub(crate) struct CallOnDrop<T: FnMut()>(pub(crate) T);

    impl<T: FnMut()> Drop for CallOnDrop<T> {
        fn drop(&mut self) {
            self.0();
        }
    }
}