use static_assertions::const_assert_eq;
use std::array as StdArray;
use std::cmp::Ordering;
use std::fmt as StdFmt;
use std::ptr as StdPtr;
use std::sync::atomic::{AtomicPtr, AtomicU8, AtomicU64};
use seize::Guard;
use crate::leaf_trait::TreeInternode;
use crate::nodeversion::NodeVersion;
use crate::ordering::{READ_ORD, RELAXED, WRITE_ORD};
use crate::prefetch::prefetch_read;
mod accessors;
mod layout;
/// Maximum number of separator keys stored in one internode.
pub const WIDTH: usize = 15;
/// A node holding `k` keys has `k + 1` children; sized for the full case.
const NUM_CHILDREN: usize = WIDTH + 1;
/// Interior node of the tree: up to [`WIDTH`] separator keys plus
/// [`WIDTH`] + 1 child pointers, aligned to a 64-byte cache line.
///
/// All mutable fields are atomics so the node can be read concurrently while
/// writers coordinate through the [`NodeVersion`] word.
#[repr(C, align(64))]
pub struct InternodeNode {
    // Version word used for optimistic concurrency (carries the root flag and
    // split lineage; see `mark_root` / `new_for_split`).
    version: NodeVersion,
    // Number of valid entries in `ikey0`; always <= WIDTH.
    nkeys: AtomicU8,
    // Level above the leaf layer; children are internodes only when > 0
    // (see the re-parenting loop in `split_into`).
    height: u8,
    // Explicit padding to keep the following fields at a stable C-layout offset.
    _pad: [u8; 2],
    // Type-erased pointer to the parent internode; null at the root.
    parent: AtomicPtr<u8>,
    // Sorted separator keys; only the first `nkeys` slots are meaningful.
    ikey0: [AtomicU64; WIDTH],
    // Child pointers: `child[i]` is left of `ikey0[i]`, `child[nkeys]` rightmost.
    child: [AtomicPtr<u8>; NUM_CHILDREN],
}
impl StdFmt::Debug for InternodeNode {
    /// Renders a compact diagnostic view: key count, height, and whether a
    /// parent pointer is set (pointer values themselves are omitted).
    fn fmt(&self, f: &mut StdFmt::Formatter<'_>) -> StdFmt::Result {
        let key_count = self.nkeys();
        // SAFETY-adjacent note: the unguarded parent load is only used to test
        // for null here, never dereferenced.
        let has_parent = !unsafe { self.parent_unguarded() }.is_null();
        let mut builder = f.debug_struct("InternodeNode");
        builder.field("nkeys", &key_count);
        builder.field("height", &self.height);
        builder.field("has_parent", &has_parent);
        builder.finish_non_exhaustive()
    }
}
// Compile-time proof that `#[repr(align(64))]` actually yields cache-line
// alignment; pointer casts in `split_into` rely on it.
const_assert_eq!(std::mem::align_of::<InternodeNode>(), 64);
impl InternodeNode {
    /// Initializes an empty internode in place at `ptr` with the given height.
    ///
    /// The whole node is zeroed first (null children/parent, zero keys,
    /// `nkeys` = 0), then the version word is overwritten with a freshly
    /// constructed one.
    ///
    /// # Safety
    /// `ptr` must be non-null, 64-byte aligned, and valid for writes of one
    /// `Self`.
    #[inline]
    pub unsafe fn init_at(ptr: *mut Self, height: u32) {
        debug_assert!(height <= 15, "init_at: height {height} exceeds maximum 15");
        unsafe {
            // Zero everything; the all-zero bit pattern is the empty state for
            // every field except `version`, which is written next.
            StdPtr::write_bytes(ptr, 0, 1);
            let node = &mut *ptr;
            StdPtr::write(&raw mut node.version, NodeVersion::new(false));
            #[expect(clippy::cast_possible_truncation, reason = "height <= 15 in practice")]
            {
                node.height = height as u8;
            }
        }
    }

    /// Like [`Self::init_at`], but additionally marks the node as the tree root.
    ///
    /// # Safety
    /// Same pointer contract as [`Self::init_at`].
    #[inline]
    pub unsafe fn init_at_root(ptr: *mut Self, height: u32) {
        unsafe {
            Self::init_at(ptr, height);
            (*ptr).version.mark_root();
        }
    }

    /// In-place initialization for a node created by a split: the version word
    /// is derived from `parent_version` instead of being fresh.
    ///
    /// # Safety
    /// Same pointer contract as [`Self::init_at`].
    #[inline]
    pub unsafe fn init_at_for_split(ptr: *mut Self, parent_version: &NodeVersion, height: u32) {
        unsafe {
            StdPtr::write_bytes(ptr, 0, 1);
            let node: &mut Self = &mut *ptr;
            let split_version: NodeVersion = NodeVersion::new_for_split(parent_version);
            StdPtr::write(&raw mut node.version, split_version);
            #[expect(clippy::cast_possible_truncation, reason = "height <= 15 in practice")]
            {
                node.height = height as u8;
            }
        }
    }

    /// Heap-allocates an empty internode at the given height.
    #[must_use]
    #[inline]
    pub fn new(height: u32) -> Box<Self> {
        #[expect(clippy::cast_possible_truncation, reason = "height <= 15 in practice")]
        Box::new(Self {
            version: NodeVersion::new(false),
            nkeys: AtomicU8::new(0),
            height: height as u8,
            _pad: [0; 2],
            parent: AtomicPtr::new(StdPtr::null_mut()),
            ikey0: StdArray::from_fn(|_| AtomicU64::new(0)),
            child: StdArray::from_fn(|_| AtomicPtr::new(StdPtr::null_mut())),
        })
    }

    /// Heap-allocates an empty internode already marked as the tree root.
    #[must_use]
    #[inline(always)]
    pub fn new_root(height: u32) -> Box<Self> {
        let node: Box<Self> = Self::new(height);
        node.version.mark_root();
        node
    }

    /// Heap-allocates an empty internode whose version is derived from
    /// `parent_version`, for use as the right sibling of a splitting node.
    #[must_use]
    #[inline]
    pub fn new_for_split(parent_version: &NodeVersion, height: u32) -> Box<Self> {
        let split_version: NodeVersion = NodeVersion::new_for_split(parent_version);
        #[expect(clippy::cast_possible_truncation, reason = "height <= 15 in practice")]
        Box::new(Self {
            version: split_version,
            nkeys: AtomicU8::new(0),
            height: height as u8,
            _pad: [0; 2],
            parent: AtomicPtr::new(StdPtr::null_mut()),
            ikey0: StdArray::from_fn(|_| AtomicU64::new(0)),
            child: StdArray::from_fn(|_| AtomicPtr::new(StdPtr::null_mut())),
        })
    }

    /// Inserts `new_ikey` at key slot `p` with `new_child` as the child to the
    /// right of it, shifting later entries one slot rightward.
    ///
    /// Key moves use `RELAXED`; child stores and the final `nkeys` bump use
    /// `WRITE_ORD`, so the count is published only after the entries it covers.
    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
    pub fn insert_key_and_child(&self, p: usize, new_ikey: u64, new_child: *mut u8) {
        let n: usize = self.nkeys.load(RELAXED) as usize;
        debug_assert!(n < WIDTH, "insert_key_and_child: node is full");
        debug_assert!(
            p <= n,
            "insert_key_and_child: position {p} out of bounds (n={n})"
        );
        // Shift right-to-left so no entry is overwritten before being copied.
        // Child slot i+1 belongs to key i and travels with it.
        for i in (p..n).rev() {
            let key: u64 = self.ikey0[i].load(RELAXED);
            self.ikey0[i + 1].store(key, RELAXED);
            let child: *mut u8 = self.child[i + 1].load(RELAXED);
            self.child[i + 2].store(child, WRITE_ORD);
        }
        self.ikey0[p].store(new_ikey, RELAXED);
        self.child[p + 1].store(new_child, WRITE_ORD);
        #[expect(clippy::cast_possible_truncation)]
        self.nkeys.store((n + 1) as u8, WRITE_ORD);
    }

    /// Copies `count` key/right-child pairs from `src[src_pos..]` into
    /// `self[dst_pos..]`. Note the child to the RIGHT of each key is copied
    /// (`src.child[src_pos + 1 + i]`); `self.child[dst_pos]` is not touched.
    ///
    /// # Safety
    /// Caller must guarantee both source and destination ranges are in bounds
    /// (the split logic ensures this).
    #[inline(always)]
    #[expect(
        clippy::indexing_slicing,
        reason = "bounds checked by caller via split logic"
    )]
    pub unsafe fn shift_from(&self, dst_pos: usize, src: &Self, src_pos: usize, count: usize) {
        if count == 0 {
            return;
        }
        for i in 0..count {
            let key: u64 = src.ikey0[src_pos + i].load(RELAXED);
            self.ikey0[dst_pos + i].store(key, RELAXED);
            let child: *mut u8 = src.child[src_pos + 1 + i].load(RELAXED);
            self.child[dst_pos + 1 + i].store(child, WRITE_ORD);
        }
    }

    /// Splits this full node around a pending insert at `insert_pos`, moving
    /// the upper half into `new_right`.
    ///
    /// Returns `(popup_key, insert_went_left)`: the separator key the caller
    /// must push into the parent, and whether the new entry landed in `self`
    /// (left sibling) rather than in `new_right`.
    #[must_use = "popup_key must be inserted into parent node to complete the split"]
    #[expect(
        clippy::cast_possible_truncation,
        reason = "WIDTH <= 15, so mid and WIDTH-mid fit in u8"
    )]
    pub fn split_into(
        &self,
        new_right: &mut Self,
        new_right_ptr: *mut Self,
        insert_pos: usize,
        insert_ikey: u64,
        insert_child: *mut u8,
    ) -> (u64, bool) {
        debug_assert!(
            self.nkeys.load(RELAXED) as usize == WIDTH,
            "split_into: node must be full"
        );
        debug_assert!(
            new_right.nkeys.load(RELAXED) == 0,
            "split_into: new_right must be empty"
        );
        let mid: usize = WIDTH.div_ceil(2);
        let (popup_key, insert_went_left) = match insert_pos.cmp(&mid) {
            // Insert lands in the left node: keys [mid..WIDTH) move right,
            // key mid-1 pops up, then the new entry is inserted on the left.
            Ordering::Less => {
                new_right.set_child(0, unsafe { self.child_unguarded(mid) });
                unsafe { new_right.shift_from(0, self, mid, WIDTH - mid) };
                new_right.nkeys.store((WIDTH - mid) as u8, WRITE_ORD);
                let popup: u64 = self.ikey_relaxed(mid - 1);
                // Shrink first so insert_key_and_child sees the reduced count.
                self.nkeys.store((mid - 1) as u8, WRITE_ORD);
                self.insert_key_and_child(insert_pos, insert_ikey, insert_child);
                (popup, true)
            }
            // The new key itself becomes the separator: its child becomes
            // new_right's leftmost child, and neither side stores the key.
            Ordering::Equal => {
                new_right.set_child(0, insert_child);
                unsafe { new_right.shift_from(0, self, mid, WIDTH - mid) };
                new_right.nkeys.store((WIDTH - mid) as u8, WRITE_ORD);
                self.nkeys.store(mid as u8, WRITE_ORD);
                (insert_ikey, false)
            }
            // Insert lands in the right node: copy the keys before it, splice
            // in the new entry, then copy the keys after it.
            Ordering::Greater => {
                let right_insert_pos: usize = insert_pos - (mid + 1);
                new_right.set_child(0, unsafe { self.child_unguarded(mid + 1) });
                unsafe { new_right.shift_from(0, self, mid + 1, right_insert_pos) };
                new_right.set_ikey_relaxed(right_insert_pos, insert_ikey);
                new_right.set_child(right_insert_pos + 1, insert_child);
                let count_after: usize = WIDTH - insert_pos;
                unsafe {
                    new_right.shift_from(right_insert_pos + 1, self, insert_pos, count_after);
                };
                new_right.nkeys.store((WIDTH - mid) as u8, WRITE_ORD);
                let popup: u64 = self.ikey_relaxed(mid);
                self.nkeys.store(mid as u8, WRITE_ORD);
                (popup, false)
            }
        };
        new_right.height = self.height;
        // When children are internodes (height > 0), the ones that moved must
        // have their parent pointers retargeted at the new right node.
        if self.height > 0 {
            let nr_nkeys: usize = new_right.nkeys.load(RELAXED) as usize;
            let new_right_ptr_u8: *mut u8 = new_right_ptr.cast::<u8>();
            for i in 0..=nr_nkeys {
                let child: *mut u8 = unsafe { new_right.child_unguarded(i) };
                debug_assert!(!child.is_null(), "split_into: null child at index {i}");
                #[expect(
                    clippy::cast_ptr_alignment,
                    reason = "height > 0 guarantees children are InternodeNode with 64-byte alignment"
                )]
                unsafe {
                    (*child.cast::<Self>()).set_parent(new_right_ptr_u8);
                }
            }
        }
        (popup_key, insert_went_left)
    }

    /// Loads the parent pointer through `guard`, protecting it from
    /// concurrent reclamation.
    #[must_use]
    #[inline(always)]
    pub fn parent(&self, guard: &impl Guard) -> *mut u8 {
        guard.protect(&self.parent, READ_ORD)
    }

    /// Loads the parent pointer with no reclamation protection.
    ///
    /// # Safety
    /// Caller must ensure the returned pointer cannot be reclaimed while in
    /// use (e.g. a guard is already active, or the value is only null-tested).
    #[must_use]
    #[inline(always)]
    pub unsafe fn parent_unguarded(&self) -> *mut u8 {
        self.parent.load(READ_ORD)
    }

    /// Publishes a new parent pointer.
    #[inline(always)]
    pub fn set_parent(&self, parent: *mut u8) {
        self.parent.store(parent, WRITE_ORD);
    }

    /// Whether this node's version is flagged as the tree root.
    #[must_use]
    #[inline(always)]
    pub fn is_root(&self) -> bool {
        self.version.is_root()
    }

    /// Three-way comparison of `search_ikey` against the key stored at slot `p`.
    #[must_use]
    #[inline(always)]
    pub fn compare_key(&self, search_ikey: u64, p: usize) -> Ordering {
        search_ikey.cmp(&self.ikey(p))
    }

    /// Returns the first slot whose key is >= `insert_ikey`, or `nkeys()` if
    /// every stored key is smaller — i.e. where the new key belongs.
    ///
    /// Linear scan, 4-way unrolled, with prefetches of the later key slots to
    /// hide cache-miss latency on wide nodes.
    #[inline]
    #[expect(
        clippy::indexing_slicing,
        reason = "n = nkeys() <= WIDTH-1 < WIDTH, so i+3 < i+4 <= n < WIDTH is always in bounds"
    )]
    pub fn find_insert_position(&self, insert_ikey: u64) -> usize {
        let n: usize = self.nkeys();
        if n > 6 {
            prefetch_read(&raw const self.ikey0[6]);
        }
        let mut i: usize = 0;
        // Unrolled by 4; the guard `i + 4 <= n` keeps all four probes in bounds.
        while (i + 4) <= n {
            if (i == 8) && (n > 13) {
                prefetch_read(&raw const self.ikey0[14]);
            }
            if self.ikey0[i].load(RELAXED) >= insert_ikey {
                return i;
            }
            if self.ikey0[i + 1].load(RELAXED) >= insert_ikey {
                return i + 1;
            }
            if self.ikey0[i + 2].load(RELAXED) >= insert_ikey {
                return i + 2;
            }
            if self.ikey0[i + 3].load(RELAXED) >= insert_ikey {
                return i + 3;
            }
            i += 4;
        }
        // Remainder loop for the final n % 4 slots.
        while i < n {
            if self.ikey0[i].load(RELAXED) >= insert_ikey {
                return i;
            }
            i += 1;
        }
        n
    }

    /// Debug-only consistency check: key count within bounds and keys in
    /// strictly ascending order.
    #[cfg(debug_assertions)]
    pub fn debug_assert_invariants(&self) {
        assert!(
            self.nkeys() <= WIDTH,
            "nkeys {} exceeds WIDTH {}",
            self.nkeys(),
            WIDTH
        );
        let size: usize = self.size();
        if size > 1 {
            for i in 1..size {
                assert!(
                    self.ikey_relaxed(i - 1) < self.ikey_relaxed(i),
                    "keys not in ascending order: ikey[{}] ({:#x}) >= ikey[{}] ({:#x})",
                    i - 1,
                    self.ikey_relaxed(i - 1),
                    i,
                    self.ikey_relaxed(i)
                );
            }
        }
    }

    /// Release builds: invariant checking compiles to nothing.
    #[cfg(not(debug_assertions))]
    #[inline]
    pub const fn debug_assert_invariants(&self) {}
}
// SAFETY: all mutable state is held in atomics (AtomicU8/AtomicU64/AtomicPtr);
// `height` and `_pad` are plain bytes written only during initialization.
// NOTE(review): this also assumes `NodeVersion` is itself safe to share across
// threads — confirm against its definition.
unsafe impl Send for InternodeNode {}
// SAFETY: see `Send` above — interior mutability is confined to atomic fields.
unsafe impl Sync for InternodeNode {}
/// Thin adapter: every trait method delegates to the inherent method of the
/// same name (most of which live in the `accessors` submodule), so generic
/// tree code and direct callers share one implementation.
impl TreeInternode for InternodeNode {
    const WIDTH: usize = WIDTH;
    #[inline(always)]
    fn version(&self) -> &NodeVersion {
        Self::version(self)
    }
    #[inline(always)]
    fn height(&self) -> u32 {
        Self::height(self)
    }
    #[inline(always)]
    fn children_are_leaves(&self) -> bool {
        Self::children_are_leaves(self)
    }
    #[inline(always)]
    fn nkeys(&self) -> usize {
        Self::nkeys(self)
    }
    #[inline(always)]
    fn nkeys_relaxed(&self) -> usize {
        Self::nkeys_relaxed(self)
    }
    #[inline(always)]
    fn set_nkeys(&self, n: u8) {
        Self::set_nkeys(self, n);
    }
    #[inline(always)]
    fn inc_nkeys(&self) {
        Self::inc_nkeys(self);
    }
    #[inline(always)]
    fn is_full(&self) -> bool {
        Self::is_full(self)
    }
    #[inline(always)]
    fn ikey(&self, idx: usize) -> u64 {
        Self::ikey(self, idx)
    }
    // Implemented directly rather than delegated: a plain relaxed load of the
    // key slot, valid only for in-bounds indices.
    #[inline(always)]
    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
    fn ikey_relaxed(&self, i: usize) -> u64 {
        debug_assert!(i < WIDTH, "ikey_relaxed: index {i} out of bounds");
        self.ikey0[i].load(RELAXED)
    }
    #[inline(always)]
    fn ikey_ptr(&self) -> *const u64 {
        self.ikey0.as_ptr().cast::<u64>()
    }
    #[inline(always)]
    fn set_ikey(&self, idx: usize, key: u64) {
        Self::set_ikey(self, idx, key);
    }
    #[inline(always)]
    fn compare_key(&self, search_ikey: u64, p: usize) -> Ordering {
        Self::compare_key(self, search_ikey, p)
    }
    #[inline(always)]
    fn find_insert_position(&self, insert_ikey: u64) -> usize {
        Self::find_insert_position(self, insert_ikey)
    }
    // NOTE(review): the trait exposes the unguarded child load through a safe
    // signature; callers of the trait are presumably expected to hold a guard
    // already — confirm the trait's documented contract.
    #[inline(always)]
    fn child(&self, idx: usize) -> *mut u8 {
        unsafe { Self::child_unguarded(self, idx) }
    }
    #[inline(always)]
    fn set_child(&self, idx: usize, child: *mut u8) {
        Self::set_child(self, idx, child);
    }
    #[inline(always)]
    fn assign(&self, p: usize, ikey: u64, right_child: *mut u8) {
        Self::assign(self, p, ikey, right_child);
    }
    #[inline(always)]
    fn insert_key_and_child(&self, p: usize, new_ikey: u64, new_child: *mut u8) {
        Self::insert_key_and_child(self, p, new_ikey, new_child);
    }
    // Same caveat as `child`: unguarded load behind a safe trait signature.
    #[inline(always)]
    fn parent(&self) -> *mut u8 {
        unsafe { Self::parent_unguarded(self) }
    }
    #[inline(always)]
    fn set_parent(&self, parent: *mut u8) {
        Self::set_parent(self, parent);
    }
    #[inline(always)]
    fn is_root(&self) -> bool {
        Self::is_root(self)
    }
    #[inline(always)]
    fn shift_from(&self, dst_pos: usize, src: &Self, src_pos: usize, count: usize) {
        unsafe { Self::shift_from(self, dst_pos, src, src_pos, count) };
    }
    #[inline(always)]
    fn split_into(
        &self,
        new_right: &mut Self,
        new_right_ptr: *mut Self,
        insert_pos: usize,
        insert_ikey: u64,
        insert_child: *mut u8,
    ) -> (u64, bool) {
        Self::split_into(
            self,
            new_right,
            new_right_ptr,
            insert_pos,
            insert_ikey,
            insert_child,
        )
    }
}
#[cfg(test)]
mod unit_tests;
#[cfg(loom)]
mod loom_tests;