use stable_deref_trait::StableDeref;
use std::alloc::Layout;
use std::borrow::Borrow;
use std::cmp::Eq;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::fmt;
use std::hash::Hash;
use std::iter::{FromIterator, IntoIterator};
use std::ops::Index;
use std::ptr::NonNull;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::RwLock;
use std::sync::TryLockError;
/// Append-only map that hands out references through a shared `&self`.
pub struct FrozenMap<K, V> {
    // All access goes through this lock; reads take the shared side, and
    // mutation is limited to inserting new entries.
    map: RwLock<HashMap<K, V>>,
}
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for FrozenMap<K, V> {
    /// Formats the map without blocking: if the lock is currently held,
    /// renders `FrozenMap(<locked>)` instead of waiting.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Stand-in shown when the inner lock cannot be acquired right now.
        struct LockedPlaceholder;
        impl fmt::Debug for LockedPlaceholder {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("<locked>")
            }
        }
        match self.map.try_read() {
            Ok(guard) => guard.fmt(f),
            // Poisoned lock: the data is still accessible, so show it.
            Err(TryLockError::Poisoned(err)) => {
                f.debug_tuple("FrozenMap").field(&&**err.get_ref()).finish()
            }
            Err(TryLockError::WouldBlock) => f
                .debug_tuple("FrozenMap")
                .field(&LockedPlaceholder)
                .finish(),
        }
    }
}
impl<K, V> Default for FrozenMap<K, V> {
fn default() -> Self {
Self {
map: Default::default(),
}
}
}
impl<K, V> FrozenMap<K, V> {
    /// Creates an empty `FrozenMap`; equivalent to `Default::default()`.
    pub fn new() -> Self {
        Default::default()
    }
}
impl<T> From<Vec<T>> for FrozenVec<T> {
fn from(vec: Vec<T>) -> Self {
Self {
vec: RwLock::new(vec),
}
}
}
impl<K: Eq + Hash, V: StableDeref> FrozenMap<K, V> {
    /// Inserts `k -> v` if absent and returns a reference to the stored
    /// value's pointee. If the key already exists, the existing value is
    /// kept (`v` is dropped) and its pointee is returned.
    pub fn insert(&self, k: K, v: V) -> &V::Target {
        let mut map = self.map.write().unwrap();
        let ret = unsafe {
            // SAFETY: `V: StableDeref` keeps the pointee address stable even
            // if the map rehashes, and this API never removes or replaces a
            // value through `&self`, so extending the reference's lifetime
            // past the lock guard is sound.
            let inserted = &**map.entry(k).or_insert(v);
            &*(inserted as *const _)
        };
        ret
    }
    /// Like [`FrozenMap::insert`], but the value is computed lazily, only
    /// when the key is absent.
    pub fn insert_with(&self, k: K, f: impl FnOnce() -> V) -> &V::Target {
        let mut map = self.map.write().unwrap();
        let ret = unsafe {
            // SAFETY: same argument as in `insert`.
            let inserted = &**map.entry(k).or_insert_with(f);
            &*(inserted as *const _)
        };
        ret
    }
    /// Like [`FrozenMap::insert_with`], but the closure receives the key.
    pub fn insert_with_key(&self, k: K, f: impl FnOnce(&K) -> V) -> &V::Target {
        let mut map = self.map.write().unwrap();
        let ret = unsafe {
            // SAFETY: same argument as in `insert`.
            let inserted = &**map.entry(k).or_insert_with_key(f);
            &*(inserted as *const _)
        };
        ret
    }
    /// Returns a reference to the pointee of the value for `k`, if present.
    pub fn get<Q>(&self, k: &Q) -> Option<&V::Target>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        let map = self.map.read().unwrap();
        // SAFETY: same argument as in `insert`.
        let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) };
        ret
    }
    /// Applies `f` to the value for `k` (the value itself, not its pointee)
    /// while the read lock is held, returning the result by value.
    pub fn map_get<Q, T, F>(&self, k: &Q, f: F) -> Option<T>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
        F: FnOnce(&V) -> T,
    {
        let map = self.map.read().unwrap();
        let ret = map.get(k).map(f);
        ret
    }
}
impl<K, V> FrozenMap<K, V> {
    /// Consumes the map, returning its entries as key/value pairs.
    pub fn into_tuple_vec(self) -> Vec<(K, V)> {
        let inner = self.map.into_inner().unwrap();
        inner.into_iter().collect()
    }
    /// Number of entries currently stored.
    pub fn len(&self) -> usize {
        self.map.read().unwrap().len()
    }
    /// Whether the map currently holds no entries.
    pub fn is_empty(&self) -> bool {
        self.map.read().unwrap().is_empty()
    }
}
impl<K: Clone, V> FrozenMap<K, V> {
    /// Returns a clone of every key currently in the map.
    pub fn keys_cloned(&self) -> Vec<K> {
        let guard = self.map.read().unwrap();
        guard.keys().cloned().collect()
    }
}
impl<K: Eq + Hash, V: Copy> FrozenMap<K, V> {
    /// Returns a copy of the value stored for `k`, if present.
    pub fn get_copy<Q>(&self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        let guard = self.map.read().unwrap();
        guard.get(k).copied()
    }
    /// Returns the value for `k`, inserting `v` first if the key is absent.
    pub fn get_copy_or_insert(&self, k: K, v: V) -> V {
        let mut guard = self.map.write().unwrap();
        *guard.entry(k).or_insert(v)
    }
    /// Like [`FrozenMap::get_copy_or_insert`], computing the value lazily.
    pub fn get_copy_or_insert_with(&self, k: K, f: impl FnOnce() -> V) -> V {
        let mut guard = self.map.write().unwrap();
        *guard.entry(k).or_insert_with(f)
    }
    /// Like [`FrozenMap::get_copy_or_insert_with`], but the closure
    /// receives the key being inserted.
    pub fn get_copy_or_insert_with_key(&self, k: K, f: impl FnOnce(&K) -> V) -> V {
        let mut guard = self.map.write().unwrap();
        *guard.entry(k).or_insert_with_key(f)
    }
}
impl<K, V> std::convert::AsMut<HashMap<K, V>> for FrozenMap<K, V> {
    /// Grants direct mutable access; `&mut self` proves exclusivity, so the
    /// lock can be bypassed soundly.
    fn as_mut(&mut self) -> &mut HashMap<K, V> {
        let inner = self.map.get_mut();
        inner.unwrap()
    }
}
impl<K: Clone, V: Clone> Clone for FrozenMap<K, V> {
fn clone(&self) -> Self {
Self {
map: self.map.read().unwrap().clone().into(),
}
}
}
impl<K: Eq + Hash, V: PartialEq> PartialEq for FrozenMap<K, V> {
fn eq(&self, other: &Self) -> bool {
let self_ref: &HashMap<K, V> = &self.map.read().unwrap();
let other_ref: &HashMap<K, V> = &other.map.read().unwrap();
self_ref == other_ref
}
}
/// Append-only vector that hands out references through a shared `&self`.
pub struct FrozenVec<T> {
    // All access goes through this lock; reads take the shared side, and
    // mutation is limited to pushing new elements.
    vec: RwLock<Vec<T>>,
}
impl<T: fmt::Debug> fmt::Debug for FrozenVec<T> {
    /// Formats the vector without blocking: if the lock is currently held,
    /// renders `FrozenVec(<locked>)` instead of waiting.
    ///
    /// Fix: the debug tuple was labeled "FrozenMap" (copy-pasted from the
    /// map impl); it now correctly reports "FrozenVec".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.vec.try_read() {
            Ok(guard) => guard.fmt(f),
            // Poisoned lock: the data is still accessible, so show it.
            Err(TryLockError::Poisoned(err)) => {
                f.debug_tuple("FrozenVec").field(&&**err.get_ref()).finish()
            }
            Err(TryLockError::WouldBlock) => {
                // Stand-in shown when the inner lock cannot be acquired.
                struct LockedPlaceholder;
                impl fmt::Debug for LockedPlaceholder {
                    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                        f.write_str("<locked>")
                    }
                }
                f.debug_tuple("FrozenVec")
                    .field(&LockedPlaceholder)
                    .finish()
            }
        }
    }
}
impl<T> FrozenVec<T> {
    /// Number of elements currently stored.
    pub fn len(&self) -> usize {
        self.vec.read().unwrap().len()
    }
    /// Whether the vector currently holds no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
impl<T: StableDeref> FrozenVec<T> {
    /// Creates an empty `FrozenVec`.
    pub const fn new() -> Self {
        Self {
            vec: RwLock::new(Vec::new()),
        }
    }
    /// Appends `val` to the vector.
    pub fn push(&self, val: T) {
        let mut vec = self.vec.write().unwrap();
        vec.push(val);
    }
    /// Appends `val` and returns a reference to its pointee.
    pub fn push_get(&self, val: T) -> &T::Target {
        let mut vec = self.vec.write().unwrap();
        vec.push(val);
        // SAFETY: the element at `len - 1` was just pushed, so the index is
        // in bounds. `T: StableDeref` keeps the pointee address stable even
        // if the Vec reallocates, and elements are never removed through
        // `&self`, so the reference may outlive the lock guard.
        unsafe { &*(&**vec.get_unchecked(vec.len() - 1) as *const T::Target) }
    }
    /// Appends `val` and returns the index it was stored at.
    pub fn push_get_index(&self, val: T) -> usize {
        let mut vec = self.vec.write().unwrap();
        let index = vec.len();
        vec.push(val);
        index
    }
    /// Returns a reference to the pointee of the element at `index`, if any.
    pub fn get(&self, index: usize) -> Option<&T::Target> {
        let vec = self.vec.read().unwrap();
        // SAFETY: same argument as in `push_get` — stable pointee address,
        // no removals through `&self`.
        unsafe { vec.get(index).map(|x| &*(&**x as *const T::Target)) }
    }
    /// Iterates over the pointees of the stored elements.
    pub fn iter(&self) -> Iter<'_, T> {
        self.into_iter()
    }
}
/// Iterator over the pointees of a [`FrozenVec`], created by `iter`.
#[derive(Debug)]
pub struct Iter<'a, T> {
    // Source vector; the element is re-fetched through its lock each step.
    vec: &'a FrozenVec<T>,
    // Index of the next element to yield.
    idx: usize,
}
impl<'a, T: StableDeref> Iterator for Iter<'a, T> {
    type Item = &'a T::Target;
    /// Yields the pointee at the current index; the lock is taken per call,
    /// so elements pushed mid-iteration are also observed.
    fn next(&mut self) -> Option<&'a T::Target> {
        let item = self.vec.get(self.idx)?;
        self.idx += 1;
        Some(item)
    }
}
impl<'a, T: StableDeref> IntoIterator for &'a FrozenVec<T> {
    type Item = &'a T::Target;
    type IntoIter = Iter<'a, T>;
    /// Starts iteration from index 0.
    fn into_iter(self) -> Iter<'a, T> {
        Iter { idx: 0, vec: self }
    }
}
// Checks that `FrozenVec` iteration yields the same elements, in order, as
// the source `Vec`, and that `count` visits every element.
#[test]
fn test_iteration() {
    let vec = vec!["a", "b", "c", "d"];
    let frozen: FrozenVec<_> = vec.clone().into();
    assert_eq!(vec, frozen.iter().collect::<Vec<_>>());
    for (e1, e2) in vec.iter().zip(frozen.iter()) {
        assert_eq!(*e1, e2);
    }
    assert_eq!(vec.len(), frozen.iter().count())
}
impl<T> FrozenVec<T> {
    /// Consumes the wrapper and returns the inner vector.
    pub fn into_vec(self) -> Vec<T> {
        let inner = self.vec.into_inner();
        inner.unwrap()
    }
}
impl<T> std::convert::AsMut<Vec<T>> for FrozenVec<T> {
    /// Grants direct mutable access; `&mut self` guarantees no outstanding
    /// shared borrows, so bypassing the lock is sound.
    fn as_mut(&mut self) -> &mut Vec<T> {
        let inner = self.vec.get_mut();
        inner.unwrap()
    }
}
impl<T> Default for FrozenVec<T> {
fn default() -> Self {
Self {
vec: Default::default(),
}
}
}
impl<T: Clone> Clone for FrozenVec<T> {
fn clone(&self) -> Self {
Self {
vec: self.vec.read().unwrap().clone().into(),
}
}
}
impl<T: PartialEq> PartialEq for FrozenVec<T> {
    /// Compares element-wise; both read locks are held for the comparison.
    fn eq(&self, other: &Self) -> bool {
        let lhs = self.vec.read().unwrap();
        let rhs = other.vec.read().unwrap();
        *lhs == *rhs
    }
}
/// Capacity of buffer `idx`: 3, 12, 48, … — each buffer is 4x the previous.
const fn buffer_size(idx: usize) -> usize {
    3 << (2 * idx)
}
/// Total number of elements held by all buffers before `idx`:
/// 3 + 12 + … + 3·4^(idx−1) = 4^idx − 1.
const fn prior_total_buffer_size(idx: usize) -> usize {
    (1 << (2 * idx)) - 1
}
/// Maps a global element index to the buffer that stores it, exploiting the
/// fact that buffer boundaries sit at indices of the form 4^k − 1.
const fn buffer_index(i: usize) -> usize {
    // Bit length of `i + 1`; every two index bits correspond to one buffer.
    let bit_len = usize::BITS + 1 - (i + 1).leading_zeros();
    ((bit_len >> 1) - 1) as usize
}
const NUM_BUFFERS: usize = (usize::BITS / 2) as usize;
/// Append-only vector with lock-free reads.
///
/// Storage is a series of geometrically growing buffers that are never
/// reallocated, so element addresses stay stable; only `push` takes the
/// spin lock.
pub struct LockFreeFrozenVec<T: Copy> {
    // Lazily allocated buffers of capacities 3, 12, 48, … (`buffer_size`).
    data: [AtomicPtr<T>; NUM_BUFFERS],
    // Number of initialized elements; stored with Release so readers that
    // observe it also observe the element writes.
    len: AtomicUsize,
    // Spin-lock flag serializing writers.
    locked: AtomicBool,
}
impl<T: Copy> Drop for LockFreeFrozenVec<T> {
    /// Frees every allocated buffer. `T: Copy` implies elements need no
    /// per-element drop, so deallocating the raw storage suffices.
    fn drop(&mut self) {
        for i in 0..NUM_BUFFERS {
            let layout = Self::layout(buffer_size(i));
            unsafe {
                let ptr = *self.data[i].get_mut();
                if ptr.is_null() {
                    // Buffers are allocated strictly in order, so the first
                    // null pointer means no later buffer exists either.
                    break;
                } else {
                    // SAFETY: `ptr` came from `alloc` with this exact layout
                    // (see `push`/`clone`) and is freed only here.
                    std::alloc::dealloc(ptr.cast(), layout);
                }
            }
        }
    }
}
impl<T: Copy> Default for LockFreeFrozenVec<T> {
fn default() -> Self {
Self::new()
}
}
impl<T: Copy> LockFreeFrozenVec<T> {
    // Const item so the `[expr; N]` array initializer in `null` can repeat
    // a non-`Copy` `AtomicPtr`.
    const NULL_ATOMIC_PTR: AtomicPtr<T> = AtomicPtr::new(std::ptr::null_mut());
    const fn null() -> [AtomicPtr<T>; NUM_BUFFERS] {
        [Self::NULL_ATOMIC_PTR; NUM_BUFFERS]
    }
    /// Creates an empty vector; no buffers are allocated until `push`.
    pub const fn new() -> Self {
        Self {
            data: Self::null(),
            len: AtomicUsize::new(0),
            locked: AtomicBool::new(false),
        }
    }
    // Runs `f` while holding the writer spin lock; readers never take it.
    fn lock<U>(&self, f: impl FnOnce() -> U) -> U {
        while self.locked.swap(true, Ordering::Acquire) {
            std::hint::spin_loop();
        }
        let ret = f();
        self.locked.store(false, Ordering::Release);
        ret
    }
    // Allocation layout for a buffer holding `cap` elements of `T`.
    fn layout(cap: usize) -> Layout {
        Layout::array::<T>(cap).unwrap()
    }
    // Compile-time guard: evaluating this const for a zero-sized `T` fails
    // the build, because the buffer arithmetic assumes nonzero element size.
    const NOT_ZST: () = if std::mem::size_of::<T>() == 0 {
        panic!("`LockFreeFrozenVec` cannot be used with ZSTs");
    };
    /// Appends `val` and returns the index it was stored at.
    pub fn push(&self, val: T) -> usize {
        // Referencing the const forces the ZST guard to be evaluated for
        // this instantiation of `T`.
        #[allow(path_statements)]
        {
            Self::NOT_ZST
        }
        self.lock(|| {
            let len = self.len();
            let buffer_idx = buffer_index(len);
            let mut ptr = self.data[buffer_idx].load(Ordering::Acquire);
            if ptr.is_null() {
                // First element of this buffer: allocate it lazily.
                let layout = Self::layout(buffer_size(buffer_idx));
                unsafe {
                    ptr = std::alloc::alloc(layout).cast::<T>();
                }
                assert!(!ptr.is_null());
                self.data[buffer_idx].store(ptr, Ordering::Release);
            }
            let local_index = len - prior_total_buffer_size(buffer_idx);
            unsafe {
                ptr.add(local_index).write(val);
            }
            // Publish the element: this Release store pairs with the Acquire
            // load in `len()`, so readers that observe the new length also
            // observe the element write above.
            self.len.store(len + 1, Ordering::Release);
            len
        })
    }
    /// Returns a copy of the element at `index`, or `None` if out of range.
    pub fn get(&self, index: usize) -> Option<T> {
        // The bounds check doubles as the synchronization point (Acquire
        // load of `len`).
        if index >= self.len() {
            return None;
        }
        let buffer_idx = buffer_index(index);
        let buffer_ptr = self.data[buffer_idx].load(Ordering::Acquire);
        let local_index = index - prior_total_buffer_size(buffer_idx);
        // SAFETY: `index < len`, so this slot was initialized by a `push`
        // whose effects are visible after the Acquire load above.
        Some(unsafe { *buffer_ptr.add(local_index) })
    }
    /// Whether the vector currently holds no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Number of elements currently stored.
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.len.load(Ordering::Acquire)
    }
    /// Returns a copy of the element at `index` without a bounds check.
    ///
    /// # Safety
    /// `index` must be smaller than a value of `len()` the caller has
    /// already observed (directly or via other synchronization), so the
    /// slot is initialized and its buffer pointer is visible.
    #[inline]
    pub unsafe fn get_unchecked(&self, index: usize) -> T {
        // Relaxed suffices: the caller's prior `len()` observation provides
        // the necessary synchronization (see # Safety).
        let buffer_idx = buffer_index(index);
        let buffer_ptr = self.data[buffer_idx].load(Ordering::Relaxed);
        let local_index = index - prior_total_buffer_size(buffer_idx);
        unsafe { *buffer_ptr.add(local_index) }
    }
    // Calls `func` with each allocated buffer as a slice (plus its buffer
    // index), in order, stopping at the first unallocated buffer.
    //
    // NOTE(review): the slice spans the buffer's full capacity, including
    // slots beyond `len()` that have never been written — callers appear to
    // rely only on copying raw contents (see `Clone`); confirm before
    // depending on the tail values.
    fn for_each_buffer(&self, mut func: impl FnMut(&[T], usize)) {
        for buffer_index in 0..NUM_BUFFERS {
            if let Some(buffer_ptr) = NonNull::new(self.data[buffer_index].load(Ordering::Acquire))
            {
                let buffer_size = buffer_size(buffer_index);
                let buffer_slice =
                    unsafe { std::slice::from_raw_parts(buffer_ptr.as_ptr(), buffer_size) };
                func(buffer_slice, buffer_index);
            } else {
                break;
            }
        }
    }
}
impl<T: Copy + PartialEq> PartialEq for LockFreeFrozenVec<T> {
    /// Element-wise comparison via `get`; lengths are compared first so
    /// unequal-length vectors short-circuit.
    fn eq(&self, other: &Self) -> bool {
        let len = self.len();
        if len != other.len() {
            return false;
        }
        (0..len).all(|i| self.get(i) == other.get(i))
    }
}
// Stress test: two threads push concurrently while the main thread reads
// back every published index via `get_unchecked`.
#[test]
fn test_non_lockfree_unchecked() {
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct Moo(i32);
    let vec = LockFreeFrozenVec::new();
    // Indices published by the writer threads. Reading one of these with
    // `get_unchecked` is sound because the push completes before its index
    // is inserted into the set, and the Mutex synchronizes the handoff.
    let idx_set = std::sync::Mutex::new(std::collections::HashSet::new());
    std::thread::scope(|s| {
        s.spawn(|| {
            for i in 0..1000 {
                idx_set.lock().unwrap().insert(vec.push(Moo(i)));
            }
        });
        s.spawn(|| {
            for i in 0..1000 {
                idx_set.lock().unwrap().insert(vec.push(Moo(i)));
            }
        });
        for _ in 0..2000 {
            let idxes = std::mem::take(&mut *idx_set.lock().unwrap());
            for idx in idxes {
                unsafe {
                    vec.get_unchecked(idx);
                }
            }
        }
    });
    // A ZST vector may be constructed as long as `push` is never called.
    LockFreeFrozenVec::<()>::new();
}
impl<T: Copy + Clone> Clone for LockFreeFrozenVec<T> {
    /// Deep-copies every allocated buffer into a fresh, unlocked vector.
    fn clone(&self) -> Self {
        let mut copied_data = Self::null();
        // Duplicate each allocated buffer wholesale, in order.
        self.for_each_buffer(|src, idx| {
            let layout = Self::layout(src.len());
            let dst = unsafe { std::alloc::alloc(layout).cast::<T>() };
            assert!(!dst.is_null());
            // SAFETY: `dst` was just allocated with room for `src.len()`
            // elements and cannot overlap the source buffer.
            unsafe {
                std::ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len());
            }
            *copied_data[idx].get_mut() = dst;
        });
        Self {
            data: copied_data,
            len: AtomicUsize::new(self.len()),
            locked: AtomicBool::new(false),
        }
    }
}
// Functional test: sequential pushes, interleaved pushes from two threads,
// then clones of small, large, and empty vectors are spot-checked.
#[test]
fn test_non_lockfree() {
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct Moo(i32);
    let vec = LockFreeFrozenVec::new();
    assert_eq!(vec.get(1), None);
    vec.push(Moo(1));
    let i = vec.push(Moo(2));
    vec.push(Moo(3));
    assert_eq!(vec.get(i), Some(Moo(2)));
    std::thread::scope(|s| {
        s.spawn(|| {
            for i in 0..1000 {
                vec.push(Moo(i));
            }
        });
        s.spawn(|| {
            for i in 0..1000 {
                vec.push(Moo(i));
            }
        });
        // Spin until every concurrently pushed element becomes visible.
        for i in 0..2000 {
            while vec.get(i).is_none() {}
        }
    });
    {
        // A clone must preserve the deterministic prefix pushed above.
        let vec2 = vec.clone();
        assert_eq!(vec2.get(0), Some(Moo(1)));
        assert_eq!(vec2.get(1), Some(Moo(2)));
        assert_eq!(vec2.get(2), Some(Moo(3)));
    }
    {
        // Cloning a vector spanning several buffers copies every element.
        let large_vec = LockFreeFrozenVec::new();
        for i in 0..1000 {
            large_vec.push(Moo(i));
        }
        let large_vec_2 = large_vec.clone();
        for i in 0..1000 {
            assert_eq!(large_vec_2.get(i), Some(Moo(i as i32)));
        }
    }
    {
        // ZST vectors may be created and cloned as long as nothing is pushed.
        let empty_vec = LockFreeFrozenVec::<()>::new();
        let empty_vec_2 = empty_vec.clone();
        assert_eq!(empty_vec_2.get(0), None);
    }
    LockFreeFrozenVec::<()>::new();
}
/// Append-only ordered map that hands out references through a shared
/// `&self`; the `BTreeMap` analogue of [`FrozenMap`].
#[derive(Debug)]
pub struct FrozenBTreeMap<K, V>(RwLock<BTreeMap<K, V>>);
impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> {
    /// Creates an empty `FrozenBTreeMap`.
    pub const fn new() -> Self {
        Self(RwLock::new(BTreeMap::new()))
    }
    /// Returns a reference to the pointee of the value for `k`, if present.
    pub fn get<Q>(&self, k: &Q) -> Option<&V::Target>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let map = self.0.read().unwrap();
        // SAFETY: `V: StableDeref` keeps the pointee address stable even if
        // the tree rebalances, and values are never removed or replaced
        // through `&self`, so the reference may outlive the lock guard.
        let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) };
        ret
    }
    /// Inserts `k -> v` if absent and returns a reference to the stored
    /// value's pointee; an existing value for `k` is kept (`v` is dropped).
    pub fn insert(&self, k: K, v: V) -> &V::Target {
        let mut map = self.0.write().unwrap();
        let ret = unsafe {
            // SAFETY: same argument as in `get`.
            let inserted = &**map.entry(k).or_insert(v);
            &*(inserted as *const _)
        };
        ret
    }
    /// Applies `f` to the value for `k` (the value itself, not its pointee)
    /// while the read lock is held, returning the result by value.
    pub fn map_get<Q, T, F>(&self, k: &Q, f: F) -> Option<T>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
        F: FnOnce(&V) -> T,
    {
        let map = self.0.read().unwrap();
        let ret = map.get(k).map(f);
        ret
    }
}
impl<K, V> FrozenBTreeMap<K, V> {
    /// Consumes the map, returning its entries as key/value pairs.
    pub fn into_tuple_vec(self) -> Vec<(K, V)> {
        let inner = self.0.into_inner().unwrap();
        inner.into_iter().collect()
    }
    /// Number of entries currently stored.
    pub fn len(&self) -> usize {
        self.0.read().unwrap().len()
    }
    /// Whether the map currently holds no entries.
    pub fn is_empty(&self) -> bool {
        self.0.read().unwrap().is_empty()
    }
}
impl<K: Clone + Ord, V: StableDeref> From<BTreeMap<K, V>> for FrozenBTreeMap<K, V> {
fn from(map: BTreeMap<K, V>) -> Self {
Self(RwLock::new(map))
}
}
impl<Q: ?Sized, K, V> Index<&Q> for FrozenBTreeMap<K, V>
where
    Q: Ord,
    K: Clone + Ord + Borrow<Q>,
    V: StableDeref,
{
    type Output = V::Target;
    /// Panics when `idx` is absent; use [`FrozenBTreeMap::get`] for a
    /// non-panicking lookup.
    fn index(&self, idx: &Q) -> &V::Target {
        let found = self.get(idx);
        found.expect("attempted to index FrozenBTreeMap with unknown key")
    }
}
impl<K: Clone + Ord, V: StableDeref> FromIterator<(K, V)> for FrozenBTreeMap<K, V> {
    /// Collects the pairs into a `BTreeMap` and freezes it; later duplicate
    /// keys overwrite earlier ones, as with `BTreeMap::from_iter`.
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = (K, V)>,
    {
        iter.into_iter().collect::<BTreeMap<_, _>>().into()
    }
}
impl<K: Clone + Ord, V: StableDeref> Default for FrozenBTreeMap<K, V> {
fn default() -> Self {
Self::new()
}
}
impl<K: Clone, V: Clone> Clone for FrozenBTreeMap<K, V> {
fn clone(&self) -> Self {
Self(self.0.read().unwrap().clone().into())
}
}
impl<K: PartialEq, V: PartialEq> PartialEq for FrozenBTreeMap<K, V> {
fn eq(&self, other: &Self) -> bool {
let self_ref: &BTreeMap<K, V> = &self.0.read().unwrap();
let other_ref: &BTreeMap<K, V> = &other.0.read().unwrap();
self_ref == other_ref
}
}