use std::convert::AsRef;
use std::fmt;
use std::ops::RangeBounds;
use std::sync::atomic::Ordering::{AcqRel, Acquire, Relaxed, Release};
use sdd::{AtomicRaw, Owned, PrivateCollector, RawPtr};
use super::internal_node::InternalNode;
use super::internal_node::Locker as InternalNodeLocker;
use super::leaf::{InsertResult, Iter, Leaf, RemoveResult, RevIter};
use super::leaf_node::LeafNode;
use crate::utils::{LockPager, deref_unchecked, get_owned, retire, undefined};
use crate::{Comparable, Guard};
/// A node of the tree: either an internal (routing) node or a leaf node.
///
/// Aligned to 64 bytes so each node occupies its own cache line, reducing
/// false sharing between nodes accessed by concurrent threads
/// (NOTE(review): 64-byte cache lines assumed — typical on x86_64/aarch64).
#[repr(align(64))]
pub enum Node<K, V> {
    /// An internal node routing searches to child nodes.
    Internal(InternalNode<K, V>),
    /// A leaf-level node holding the actual entries.
    Leaf(LeafNode<K, V>),
}
impl<K, V> Node<K, V> {
#[inline]
pub(super) fn new_leaf_node() -> Owned<Self> {
unsafe { Owned::new_with_unchecked(|| Self::Leaf(LeafNode::new())) }
}
#[inline]
pub(super) fn new_internal_node() -> Owned<Self> {
unsafe { Owned::new_with_unchecked(|| Self::Internal(InternalNode::new())) }
}
#[inline]
pub(super) fn new_internal_node_frozen() -> Owned<Self> {
unsafe { Owned::new_with_unchecked(|| Self::Internal(InternalNode::new_frozen())) }
}
#[inline]
pub(super) fn new_leaf_node_frozen() -> Owned<Self> {
unsafe { Owned::new_with_unchecked(|| Self::Leaf(LeafNode::new_frozen())) }
}
#[inline]
pub(super) fn depth(&self, depth: usize, guard: &Guard) -> usize {
match &self {
Self::Internal(internal_node) => internal_node.depth(depth, guard),
Self::Leaf(_) => depth,
}
}
#[inline]
pub(super) fn is_retired(&self) -> bool {
match &self {
Self::Internal(internal_node) => internal_node.is_retired(),
Self::Leaf(leaf_node) => leaf_node.is_retired(),
}
}
#[inline]
pub(super) fn cleanup_needed(&self) -> bool {
if let Self::Internal(internal_node) = &self {
internal_node.children.is_empty()
} else {
false
}
}
}
impl<K, V> Node<K, V>
where
    K: Clone + Ord,
{
    /// Searches the subtree for `key`, returning the matching key/value pair
    /// if it exists. Delegates to the concrete node kind.
    #[inline]
    pub(super) fn search_entry<'g, Q>(&'g self, key: &Q, guard: &'g Guard) -> Option<(&'g K, &'g V)>
    where
        Q: Comparable<K> + ?Sized,
    {
        match &self {
            Self::Internal(internal_node) => internal_node.search_entry(key, guard),
            Self::Leaf(leaf_node) => leaf_node.search_entry(key, guard),
        }
    }

    /// Searches the subtree for `key`, returning only the value if it exists.
    #[inline]
    pub(super) fn search_value<'g, Q>(&'g self, key: &Q, guard: &'g Guard) -> Option<&'g V>
    where
        Q: Comparable<K> + ?Sized,
    {
        match &self {
            Self::Internal(internal_node) => internal_node.search_value(key, guard),
            Self::Leaf(leaf_node) => leaf_node.search_value(key, guard),
        }
    }

    /// Looks up `key` and, if found, applies `reader` to the entry, returning
    /// `Ok(Some(result))`. On failure the unused `reader` closure is handed
    /// back in `Err` (presumably when the `pager` cannot obtain the required
    /// lock — the condition is decided inside the child node types).
    #[inline]
    pub fn read_entry<Q, R, F: FnOnce(&K, &V) -> R, P: LockPager>(
        &self,
        key: &Q,
        reader: F,
        pager: &mut P,
        guard: &Guard,
    ) -> Result<Option<R>, F>
    where
        Q: Comparable<K> + ?Sized,
    {
        match &self {
            Self::Internal(internal_node) => internal_node.read_entry(key, reader, pager, guard),
            Self::Leaf(leaf_node) => leaf_node.read_entry(key, reader, pager, guard),
        }
    }

    /// Returns a forward iterator positioned at the minimum entry of the
    /// subtree, or `None` (semantics delegated to the node kinds).
    #[inline]
    pub(super) fn min<'g>(&self, guard: &'g Guard) -> Option<Iter<'g, K, V>> {
        match &self {
            Self::Internal(internal_node) => internal_node.min(guard),
            Self::Leaf(leaf_node) => leaf_node.min(guard),
        }
    }

    /// Returns a reverse iterator positioned at the maximum entry of the
    /// subtree, or `None`.
    #[inline]
    pub(super) fn max<'g>(&self, guard: &'g Guard) -> Option<RevIter<'g, K, V>> {
        match &self {
            Self::Internal(internal_node) => internal_node.max(guard),
            Self::Leaf(leaf_node) => leaf_node.max(guard),
        }
    }

    /// Returns an iterator positioned near `key`; the `LE` flag selects the
    /// search direction (presumably "less-or-equal" vs. "greater-or-equal" —
    /// defined by the child implementations; TODO confirm).
    #[inline]
    pub(super) fn approximate<'g, Q, const LE: bool>(
        &self,
        key: &Q,
        guard: &'g Guard,
    ) -> Option<Iter<'g, K, V>>
    where
        Q: Comparable<K> + ?Sized,
    {
        match &self {
            Self::Internal(internal_node) => internal_node.approximate::<_, LE>(key, guard),
            Self::Leaf(leaf_node) => leaf_node.approximate::<_, LE>(key, guard),
        }
    }

    /// Inserts `key`/`val` into the subtree; `UPSERT` selects overwrite
    /// semantics. `Err((key, val))` returns ownership of the pair to the
    /// caller when the attempt must be retried.
    #[inline]
    pub(super) fn insert<P: LockPager, const UPSERT: bool>(
        &self,
        key: K,
        val: V,
        private_collector: &PrivateCollector,
        pager: &mut P,
        guard: &Guard,
    ) -> Result<InsertResult<K, V>, (K, V)> {
        match &self {
            Self::Internal(internal_node) => {
                internal_node.insert::<P, UPSERT>(key, val, private_collector, pager, guard)
            }
            Self::Leaf(leaf_node) => {
                leaf_node.insert::<P, UPSERT>(key, val, private_collector, pager, guard)
            }
        }
    }

    /// Removes the entry for `key` if `condition` approves its value.
    /// `Err(())` signals the caller must retry.
    #[inline]
    pub(super) fn remove_if<Q, F: FnMut(&V) -> bool, P>(
        &self,
        key: &Q,
        condition: &mut F,
        private_collector: &PrivateCollector,
        pager: &mut P,
        guard: &Guard,
    ) -> Result<RemoveResult, ()>
    where
        Q: Comparable<K> + ?Sized,
        P: LockPager,
    {
        match &self {
            Self::Internal(internal_node) => {
                internal_node.remove_if(key, condition, private_collector, pager, guard)
            }
            Self::Leaf(leaf_node) => {
                leaf_node.remove_if(key, condition, private_collector, pager, guard)
            }
        }
    }

    /// Removes all entries within `range` from the subtree, returning a count
    /// on success. The boundary hints (`valid_lower_max_leaf`,
    /// `valid_upper_min_node`) identify nodes straddling the range edges that
    /// must be partially cleaned rather than dropped wholesale (semantics
    /// defined by the child implementations).
    #[allow(clippy::too_many_arguments)]
    #[inline]
    pub(super) fn remove_range<'g, Q, R: RangeBounds<Q>, P: LockPager>(
        &self,
        range: &R,
        start_unbounded: bool,
        valid_lower_max_leaf: Option<&'g Leaf<K, V>>,
        valid_upper_min_node: Option<&'g Node<K, V>>,
        private_collector: &PrivateCollector,
        pager: &mut P,
        guard: &'g Guard,
    ) -> Result<usize, ()>
    where
        Q: Comparable<K> + ?Sized,
    {
        match &self {
            Self::Internal(internal_node) => internal_node.remove_range(
                range,
                start_unbounded,
                valid_lower_max_leaf,
                valid_upper_min_node,
                private_collector,
                pager,
                guard,
            ),
            Self::Leaf(leaf_node) => leaf_node.remove_range(
                range,
                start_unbounded,
                valid_lower_max_leaf,
                valid_upper_min_node,
                private_collector,
                pager,
                guard,
            ),
        }
    }

    /// Grows the tree by one level: installs a new internal root whose single
    /// (unbounded) child is the old root at `root_ptr`, then splits the old
    /// root into it. If the old root has already been retired, a fresh empty
    /// leaf root is installed instead and the old root is reclaimed.
    pub(super) fn split_root<P: LockPager>(
        root_ptr: RawPtr<Node<K, V>>,
        root: &AtomicRaw<Node<K, V>>,
        private_collector: &PrivateCollector,
        pager: &mut P,
        guard: &Guard,
    ) {
        if let Some(old_root) = deref_unchecked(root_ptr) {
            let new_root = if old_root.is_retired() {
                // The old root is defunct: start over with an empty leaf.
                Node::new_leaf_node()
            } else {
                // New internal root; the old root becomes its only child.
                let internal_node = Node::new_internal_node();
                let Node::Internal(node) = internal_node.as_ref() else {
                    // `new_internal_node` always yields `Internal`; presumably
                    // `undefined` marks this branch unreachable — see
                    // `crate::utils::undefined` (TODO confirm).
                    undefined();
                };
                node.unbounded_child.store(root_ptr, Relaxed);
                internal_node
            };
            let new_root_ptr = new_root.into_raw();
            // Release on success publishes the new root's initialization to
            // readers; failure ordering can stay Relaxed since the pointer is
            // discarded below on that path.
            if root
                .compare_exchange(root_ptr, new_root_ptr, Release, Relaxed, guard)
                .is_err()
            {
                // Lost the race. Before the freshly allocated node is dropped
                // (ownership is reclaimed via `get_owned`), unlink the
                // still-live old root so it is not torn down along with it.
                if let Some(Node::Internal(new_internal_node)) =
                    get_owned(new_root_ptr).as_ref().map(AsRef::as_ref)
                {
                    new_internal_node
                        .unbounded_child
                        .store(RawPtr::null(), Relaxed);
                }
            } else if let Some(Node::Internal(new_internal_node)) = deref_unchecked(new_root_ptr) {
                // Installed a taller root: now split the old root into it.
                // The result is deliberately ignored; presumably a failed
                // split is retried by subsequent operations (TODO confirm).
                let _: Result<bool, ()> = new_internal_node.split_node(
                    root_ptr,
                    &new_internal_node.unbounded_child,
                    pager,
                    private_collector,
                    guard,
                );
            } else {
                // The installed root is a leaf, which only happens when the
                // old root was retired — reclaim it now.
                retire::<K, _>(root_ptr, private_collector, guard);
            }
        }
    }

    /// Shrinks the tree at the root: repeatedly replaces a retired root with
    /// null, or an internal root with no bounded children with its single
    /// (unbounded) child, retiring the displaced node each time.
    ///
    /// Returns `false` only when the `pager` refuses the root lock outright
    /// (`Err(())`), signalling the caller to retry later.
    pub(super) fn cleanup_root<P: LockPager>(
        root: &AtomicRaw<Node<K, V>>,
        private_collector: &PrivateCollector,
        pager: &mut P,
        guard: &Guard,
    ) -> bool {
        let mut root_ptr = root.load(Acquire, guard);
        while let Some(root_ref) = deref_unchecked(root_ptr) {
            if root_ref.is_retired() {
                // Detach a retired root entirely by swapping in null.
                if let Err(new_root_ptr) =
                    root.compare_exchange(root_ptr, RawPtr::null(), AcqRel, Acquire, guard)
                {
                    // Another thread changed the root: restart with it.
                    root_ptr = new_root_ptr;
                    continue;
                }
                retire::<K, _>(root_ptr, private_collector, guard);
                break;
            }
            let Node::Internal(internal_node) = root_ref else {
                // A live leaf root cannot be shrunk any further.
                break;
            };
            if !internal_node.children.is_empty() {
                // The root still has bounded children: nothing to clean up.
                break;
            }
            // Lock the root before detaching it. `Ok(false)` presumably means
            // a transient failure, so the loop retries with the same pointer;
            // `Err(())` means the pager cannot grant the lock at all.
            let locker = match pager.try_acquire::<false>(&internal_node.lock) {
                Ok(true) => InternalNodeLocker {
                    node: internal_node,
                },
                Ok(false) => {
                    continue;
                }
                Err(()) => return false,
            };
            // Re-check emptiness under the lock: a concurrent split may have
            // linked in children since the unlocked check above.
            let new_root_ptr = if internal_node.children.is_empty() {
                internal_node.unbounded_child.load(Acquire, guard)
            } else {
                break;
            };
            match root.compare_exchange(root_ptr, new_root_ptr, AcqRel, Acquire, guard) {
                Ok(_) => {
                    // Freeze the (empty) child list so nothing new can be
                    // linked into the detached node, then unlock-retire it
                    // and reclaim the old root pointer.
                    internal_node.children.freeze::<true>();
                    locker.unlock_retire();
                    retire::<K, _>(root_ptr, private_collector, guard);
                    // Keep shrinking from the promoted child.
                    root_ptr = new_root_ptr;
                    // Presumably expedites deferred reclamation in `sdd`
                    // (TODO confirm).
                    guard.accelerate();
                }
                Err(new_root_ptr) => {
                    // Lost the race; `locker` drops (releasing the lock) and
                    // the loop restarts with the current root.
                    root_ptr = new_root_ptr;
                }
            }
        }
        true
    }
}
impl<K, V> fmt::Debug for Node<K, V>
where
K: Clone + fmt::Debug + Ord,
V: fmt::Debug,
{
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Internal(internal_node) => internal_node.fmt(f),
Self::Leaf(leaf_node) => leaf_node.fmt(f),
}
}
}