use super::ebr::{Arc, AtomicArc, Barrier, Ptr, Tag};
use super::hash_table::cell::{EntryIterator, Locker};
use super::hash_table::cell_array::CellArray;
use super::hash_table::HashTable;
use std::borrow::Borrow;
use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hash};
use std::iter::FusedIterator;
use std::sync::atomic::AtomicU8;
use std::sync::atomic::Ordering::{Acquire, Relaxed};
/// A scalable concurrent hash index.
///
/// `K` and `V` must be `Clone` because entries are copied into the new cell
/// array during a resize (see the `copier` implementation below), and `Sync`
/// because readers on other threads may observe them in place.
pub struct HashIndex<K, V, H = RandomState>
where
    K: 'static + Clone + Eq + Hash + Sync,
    V: 'static + Clone + Sync,
    H: BuildHasher,
{
    // The current cell array; during a resize the new array links back to the
    // old one (see `old_array` usage in `clear`/`Visitor`).
    array: AtomicArc<CellArray<K, V, true>>,
    // Lower bound for the capacity, fixed at construction time.
    minimum_capacity: usize,
    // Serializes resize operations (used through `HashTable::resize_mutex`).
    resize_mutex: AtomicU8,
    // Factory producing hashers for keys.
    build_hasher: H,
}
impl<K, V, H> HashIndex<K, V, H>
where
    K: 'static + Clone + Eq + Hash + Sync,
    V: 'static + Clone + Sync,
    H: BuildHasher,
{
    /// Creates a `HashIndex` with the given initial capacity and hasher factory.
    ///
    /// The effective capacity is clamped to at least `Self::default_capacity()`,
    /// and that clamped value also becomes the permanent minimum capacity.
    pub fn new(capacity: usize, build_hasher: H) -> HashIndex<K, V, H> {
        let initial_capacity = capacity.max(Self::default_capacity());
        HashIndex {
            array: AtomicArc::from(Arc::new(CellArray::<K, V, true>::new(
                initial_capacity,
                AtomicArc::null(),
            ))),
            minimum_capacity: initial_capacity,
            resize_mutex: AtomicU8::new(0),
            build_hasher,
        }
    }

    /// Inserts a key-value pair.
    ///
    /// On failure the pair is handed back in `Err` (failure semantics are
    /// defined by `HashTable::insert_entry`; presumably the key already
    /// exists — confirm against the trait implementation).
    #[inline]
    pub fn insert(&self, key: K, val: V) -> Result<(), (K, V)> {
        self.insert_entry(key, val)
    }

    /// Removes the entry for `key_ref`; returns `true` if an entry was removed.
    #[inline]
    pub fn remove<Q>(&self, key_ref: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        // Unconditional removal: delegate with an always-true predicate.
        self.remove_if(key_ref, |_| true)
    }

    /// Removes the entry for `key_ref` only if `condition` returns `true` for
    /// its value; returns `true` if an entry was removed.
    #[inline]
    pub fn remove_if<Q, F: FnOnce(&V) -> bool>(&self, key_ref: &Q, condition: F) -> bool
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        // `remove_entry` returns a tuple; the second element is the
        // "removed" flag.
        self.remove_entry(key_ref, condition).1
    }

    /// Applies `reader` to the entry for `key_ref`, if present, using a
    /// short-lived internal `Barrier`.
    ///
    /// Returns `None` when the key is absent.
    #[inline]
    pub fn read<Q, R, F: FnOnce(&K, &V) -> R>(&self, key_ref: &Q, reader: F) -> Option<R>
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        let barrier = Barrier::new();
        self.read_with(key_ref, reader, &barrier)
    }

    /// Applies `reader` to the entry for `key_ref`, if present, using a
    /// caller-supplied `Barrier`.
    ///
    /// The `'b` lifetime lets `reader` return references protected by the
    /// barrier rather than copies.
    #[inline]
    pub fn read_with<'b, Q, R, F: FnOnce(&'b K, &'b V) -> R>(
        &self,
        key_ref: &Q,
        reader: F,
        barrier: &'b Barrier,
    ) -> Option<R>
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        self.read_entry(key_ref, reader, barrier)
    }

    /// Returns `true` if the index contains an entry for `key`.
    #[inline]
    pub fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        self.read(key, |_, _| ()).is_some()
    }

    /// Removes every entry, returning the number of entries removed.
    ///
    /// Concurrent insertions may race with this method, so the count reflects
    /// only what this call observed and purged.
    pub fn clear(&self) -> usize {
        let mut num_removed = 0;
        let barrier = Barrier::new();
        let mut current_array_ptr = self.array.load(Acquire, &barrier);
        while let Some(current_array_ref) = current_array_ptr.as_ref() {
            // If a resize is in progress, drive `partial_rehash` until it
            // reports completion so no entries are left behind in the old
            // array; re-load the array pointer on each failed attempt.
            if !current_array_ref.old_array(&barrier).is_null() {
                while !current_array_ref.partial_rehash(
                    |k| self.hash(k),
                    |k, v| Some((k.clone(), v.clone())),
                    &barrier,
                ) {
                    current_array_ptr = self.array.load(Acquire, &barrier);
                    continue;
                }
            }
            // Lock each cell in turn, count its entries, and purge it.
            for index in 0..current_array_ref.num_cells() {
                if let Some(mut locker) = Locker::lock(current_array_ref.cell(index), &barrier) {
                    num_removed += locker.cell_ref().num_entries();
                    locker.purge(&barrier);
                }
            }
            // If the array was swapped out while we were purging, start over
            // on the new array; otherwise request a resize (allowing the
            // now-empty table to shrink) and finish.
            let new_current_array_ptr = self.array.load(Acquire, &barrier);
            if current_array_ptr == new_current_array_ptr {
                self.resize(&barrier);
                break;
            }
            current_array_ptr = new_current_array_ptr;
        }
        num_removed
    }

    /// Returns the number of entries (as counted by `HashTable::num_entries`).
    ///
    /// This is a snapshot; concurrent mutations may change it immediately.
    #[inline]
    pub fn len(&self) -> usize {
        self.num_entries(&Barrier::new())
    }

    /// Returns `true` if the index holds no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the current capacity in slots (see `HashTable::num_slots`).
    #[inline]
    pub fn capacity(&self) -> usize {
        self.num_slots(&Barrier::new())
    }

    /// Returns a [`Visitor`] over the entries, protected by `barrier`.
    ///
    /// The visitor starts in an unpositioned state (`current_array_ptr` null);
    /// it locates the oldest live array lazily on the first `next` call.
    pub fn iter<'h, 'b>(&'h self, barrier: &'b Barrier) -> Visitor<'h, 'b, K, V, H> {
        Visitor {
            hash_index: self,
            current_array_ptr: Ptr::null(),
            current_index: 0,
            current_entry_iterator: None,
            barrier_ref: barrier,
        }
    }
}
impl<K, V> Default for HashIndex<K, V, RandomState>
where
K: 'static + Clone + Eq + Hash + Sync,
V: 'static + Clone + Sync,
{
fn default() -> Self {
HashIndex {
array: AtomicArc::from(Arc::new(CellArray::<K, V, true>::new(
Self::default_capacity(),
AtomicArc::null(),
))),
minimum_capacity: Self::default_capacity(),
resize_mutex: AtomicU8::new(0),
build_hasher: RandomState::new(),
}
}
}
impl<K, V, H> Drop for HashIndex<K, V, H>
where
    K: 'static + Clone + Eq + Hash + Sync,
    V: 'static + Clone + Sync,
    H: BuildHasher,
{
    fn drop(&mut self) {
        // Purge all entries first; `clear` also drives any in-flight rehash
        // to completion before purging.
        self.clear();
        let barrier = Barrier::new();
        let current_array_ptr = self.array.load(Acquire, &barrier);
        if let Some(current_array_ref) = current_array_ptr.as_ref() {
            // Drop the linked old array (if any) before detaching the current
            // one, then hand the current array to the EBR barrier for
            // deferred reclamation. Relaxed is sufficient here: `&mut self`
            // guarantees exclusive access during drop.
            current_array_ref.drop_old_array(&barrier);
            if let Some(current_array) = self.array.swap((None, Tag::None), Relaxed) {
                barrier.reclaim(current_array);
            }
        }
    }
}
impl<K, V, H> HashTable<K, V, H, true> for HashIndex<K, V, H>
where
    K: 'static + Clone + Eq + Hash + Sync,
    V: 'static + Clone + Sync,
    H: BuildHasher,
{
    /// The cell array backing the table.
    fn cell_array(&self) -> &AtomicArc<CellArray<K, V, true>> {
        &self.array
    }

    /// The capacity floor fixed at construction.
    fn minimum_capacity(&self) -> usize {
        self.minimum_capacity
    }

    /// The flag serializing resize operations.
    fn resize_mutex(&self) -> &AtomicU8 {
        &self.resize_mutex
    }

    /// The hasher factory for keys.
    fn hasher(&self) -> &H {
        &self.build_hasher
    }

    /// Clones an entry so it can be relocated during rehash; entries are
    /// always copied (never moved), hence the `Clone` bounds on `K` and `V`.
    fn copier(key: &K, val: &V) -> Option<(K, V)> {
        Some((key.clone(), val.clone()))
    }
}
/// An iterator over the entries of a [`HashIndex`].
///
/// `'h` borrows the index itself; `'b` borrows the [`Barrier`] that keeps the
/// observed entries alive, so yielded references live for `'b`.
pub struct Visitor<'h, 'b, K, V, H>
where
    K: 'static + Clone + Eq + Hash + Sync,
    V: 'static + Clone + Sync,
    H: BuildHasher,
{
    // The index being iterated.
    hash_index: &'h HashIndex<K, V, H>,
    // The array currently being traversed; null until the first `next` call.
    current_array_ptr: Ptr<'b, CellArray<K, V, true>>,
    // Index of the cell currently being traversed within `current_array_ptr`.
    current_index: usize,
    // Iterator over the entries of the current cell, if positioned.
    current_entry_iterator: Option<EntryIterator<'b, K, V, true>>,
    // Barrier protecting every pointer and entry the visitor touches.
    barrier_ref: &'b Barrier,
}
impl<'h, 'b, K, V, H> Iterator for Visitor<'h, 'b, K, V, H>
where
    K: 'static + Clone + Eq + Hash + Sync,
    V: 'static + Clone + Sync,
    H: 'static + BuildHasher,
{
    type Item = (&'b K, &'b V);

    /// Yields the next entry, visiting the old array (when a resize is in
    /// progress) before the current one, and restarting if the array is
    /// replaced mid-iteration.
    fn next(&mut self) -> Option<Self::Item> {
        if self.current_array_ptr.is_null() {
            // First call: position the cursor on the oldest live array so
            // entries still awaiting rehash are visited too.
            let current_array_ptr = self.hash_index.array.load(Acquire, self.barrier_ref);
            let current_array_ref = current_array_ptr.as_ref().unwrap();
            let old_array_ptr = current_array_ref.old_array(self.barrier_ref);
            self.current_array_ptr = if old_array_ptr.is_null() {
                current_array_ptr
            } else {
                old_array_ptr
            };
            let cell_ref = self.current_array_ptr.as_ref().unwrap().cell(0);
            self.current_entry_iterator
                .replace(EntryIterator::new(cell_ref, self.barrier_ref));
        }
        loop {
            // Drain the current cell first.
            if let Some(iterator) = self.current_entry_iterator.as_mut() {
                if let Some(entry) = iterator.next() {
                    return Some((&entry.0 .0, &entry.0 .1));
                }
            }
            let array_ref = self.current_array_ptr.as_ref().unwrap();
            self.current_index += 1;
            // `>=` rather than `==`: when the iterator terminates, it breaks
            // out below with `current_index == num_cells()`. A *subsequent*
            // `next` call (explicitly allowed — `Visitor` implements
            // `FusedIterator`) increments `current_index` past `num_cells()`;
            // a strict equality test would then fall through to
            // `array_ref.cell(self.current_index)` with an out-of-range
            // index. With `>=`, every post-exhaustion call re-checks the
            // array pointer and keeps returning `None`.
            if self.current_index >= array_ref.num_cells() {
                let current_array_ptr = self.hash_index.array.load(Acquire, self.barrier_ref);
                if self.current_array_ptr == current_array_ptr {
                    // The array has not been replaced: iteration is complete.
                    break;
                }
                let current_array_ref = current_array_ptr.as_ref().unwrap();
                let old_array_ptr = current_array_ref.old_array(self.barrier_ref);
                if self.current_array_ptr == old_array_ptr {
                    // Finished the old array: continue with the current one.
                    self.current_array_ptr = current_array_ptr;
                    self.current_index = 0;
                    self.current_entry_iterator.replace(EntryIterator::new(
                        current_array_ref.cell(0),
                        self.barrier_ref,
                    ));
                    continue;
                }
                // The array was replaced while iterating: restart on the
                // oldest live array.
                self.current_array_ptr = if old_array_ptr.is_null() {
                    current_array_ptr
                } else {
                    old_array_ptr
                };
                self.current_index = 0;
                self.current_entry_iterator.replace(EntryIterator::new(
                    self.current_array_ptr.as_ref().unwrap().cell(0),
                    self.barrier_ref,
                ));
                continue;
            }
            // Advance to the next cell of the same array.
            self.current_entry_iterator.replace(EntryIterator::new(
                array_ref.cell(self.current_index),
                self.barrier_ref,
            ));
        }
        None
    }
}
// Marker impl: promises that `next` keeps returning `None` once it has
// returned `None`.
// NOTE(review): verify that `Visitor::next` actually upholds this contract
// when called again after exhaustion — in particular, that `current_index`
// cannot run past `num_cells()` into an out-of-range `cell(..)` access.
impl<'h, 'b, K, V, H> FusedIterator for Visitor<'h, 'b, K, V, H>
where
    K: 'static + Clone + Eq + Hash + Sync,
    V: 'static + Clone + Sync,
    H: 'static + BuildHasher,
{
}