pub struct LeafNode24<S: ValueSlot> { /* private fields */ }
Expand description
Leaf node with 24 slots using u128 permutation.
§Concurrency Model
Uses optimistic concurrency control (OCC) via NodeVersion for readers,
and lock-based writes. The AtomicPermuter24 permutation field enables
lock-free slot ordering updates.
§Memory Layout (896 bytes, 14 cache lines)
Offset Size Field
------ ---- -----
0 4B version (NodeVersion)
4 1B modstate
5 55B _pad0 (cache line isolation)
64 16B permutation (AtomicPermuter24)
80 48B _pad1 (cache line isolation)
128 192B ikey0[24] (24 × 8B)
320 24B keylenx[24]
344 192B leaf_values[24] (24 × 8B)
536 318B inline_ksuf (InlineSuffixBag)
854 2B implicit padding
856 8B external_ksuf
864 8B next
872 8B prev
880 8B parent
888 8B tail padding (align to 64B)
Implementations§
Source§impl<S: ValueSlot> LeafNode24<S>
impl<S: ValueSlot> LeafNode24<S>
Sourcepub fn new_with_root(is_root: bool) -> Self
pub fn new_with_root(is_root: bool) -> Self
Create a new leaf node (unboxed).
Sourcepub unsafe fn init_at(ptr: *mut Self, is_root: bool)
pub unsafe fn init_at(ptr: *mut Self, is_root: bool)
Initialize a leaf node directly at the given pointer.
This avoids stack allocation and copy by writing directly to the destination. Used by pool allocators for maximum performance.
§Safety
ptr must be valid, properly aligned, and point to uninitialized memory. ptr must have space for size_of::<Self>() bytes.
Sourcepub fn make_layer_root(&self)
pub fn make_layer_root(&self)
Convert this leaf into a layer root.
Sets up the node to serve as the root of a sub-layer:
- Sets parent pointer to null
- Marks version as root
NOTE: This matches LeafNode::make_layer_root in src/leaf/layer.rs.
SAFETY: Caller must ensure this node is not currently part of another tree structure, or that appropriate synchronization is in place.
Sourcepub fn new_layer_root() -> Box<Self>
pub fn new_layer_root() -> Box<Self>
Create a new leaf node configured as a layer root.
Used when creating sublayers for keys longer than 8 bytes.
Sourcepub const fn version(&self) -> &NodeVersion
pub const fn version(&self) -> &NodeVersion
Get a reference to the node’s version.
Sourcepub const fn version_mut(&mut self) -> &mut NodeVersion
pub const fn version_mut(&mut self) -> &mut NodeVersion
Get a mutable reference to the node’s version.
Sourcepub fn ikey(&self, slot: usize) -> u64
pub fn ikey(&self, slot: usize) -> u64
Get the ikey at the given physical slot.
Uses Acquire ordering to synchronize with writer’s Release stores.
§Panics
Panics in debug mode if slot >= WIDTH_24.
Sourcepub fn ikey_relaxed(&self, slot: usize) -> u64
pub fn ikey_relaxed(&self, slot: usize) -> u64
Get the ikey at the given physical slot using Relaxed ordering.
§Safety Justification
Safe to use Relaxed when:
- Caller has already loaded permutation with Acquire ordering, which synchronizes with the writer’s Release fence after modifications
- OCC version validation at the end of the read catches any races
This avoids redundant Acquire fences on each ikey load (up to 24 per search), improving read throughput by 10-15%.
§Panics
Panics in debug mode if slot >= WIDTH_24.
Sourcepub fn load_all_ikeys(&self) -> [u64; 24]
pub fn load_all_ikeys(&self) -> [u64; 24]
Load all ikeys into a contiguous buffer for SIMD search.
Sourcepub fn prefetch(&self)
pub fn prefetch(&self)
Prefetch leaf node data for range scans.
Brings the node’s key arrays (ikey0, keylenx) and value pointers
(leaf_values) into CPU cache before they’re accessed, reducing memory
latency during sequential scanning.
§Memory Layout (WIDTH=24)
Offset Size Field
------ ---- -----
0 64B Cache line 0: version + modstate + padding
64 64B Cache line 1: permutation (u128) + padding
128 192B ikey0 (24 × 8B = 192B, ~3 cache lines)
320 24B keylenx (24 × 1B)
344 192B leaf_values (24 × 8B = 192B, ~3 cache lines)
§C++ Reference
Matches C++ leaf::prefetch() pattern from masstree_scan.hh:195, 299.
Sourcepub fn prefetch_ikey(&self, slot: usize)
pub fn prefetch_ikey(&self, slot: usize)
Prefetch the ikey at the given slot into CPU cache.
This is used during linear search to hide memory latency by prefetching future ikeys while processing current ones.
§Arguments
slot - Physical slot index (0..WIDTH_24)
§Safety
The slot must be in range [0, WIDTH_24). No bounds check in release mode.
Sourcepub fn set_keylenx(&self, slot: usize, keylenx: u8)
pub fn set_keylenx(&self, slot: usize, keylenx: u8)
Set the keylenx at the given physical slot.
Sourcepub fn ikey_bound(&self) -> u64
pub fn ikey_bound(&self) -> u64
Get the ikey bound (ikey at slot 0, used for B-link tree routing).
Sourcepub fn keylenx_bound(&self) -> u8
pub fn keylenx_bound(&self) -> u8
Get the keylenx bound for this leaf.
Sourcepub const fn keylenx_is_layer(keylenx: u8) -> bool
pub const fn keylenx_is_layer(keylenx: u8) -> bool
Check if keylenx indicates a layer pointer (static helper).
Sourcepub const fn keylenx_has_ksuf(keylenx: u8) -> bool
pub const fn keylenx_has_ksuf(keylenx: u8) -> bool
Check if keylenx indicates suffix storage (static helper).
Sourcepub fn external_ksuf_ptr(&self) -> *mut SuffixBag<WIDTH_24>
pub fn external_ksuf_ptr(&self) -> *mut SuffixBag<WIDTH_24>
Load external suffix bag pointer (reader).
Sourcepub fn has_external_ksuf(&self) -> bool
pub fn has_external_ksuf(&self) -> bool
Check if this leaf has external suffix storage allocated.
Sourcepub fn ksuf(&self, slot: usize) -> Option<&[u8]>
pub fn ksuf(&self, slot: usize) -> Option<&[u8]>
Get the suffix for a slot (checks inline first, then external).
§Safety Note
Caller must ensure suffix storage is stable via version validation or lock.
Sourcepub fn ksuf_or_empty(&self, slot: usize) -> &[u8] ⓘ
pub fn ksuf_or_empty(&self, slot: usize) -> &[u8] ⓘ
Get the suffix for a slot, or an empty slice if none.
Sourcepub unsafe fn assign_ksuf(
&self,
slot: usize,
suffix: &[u8],
guard: &LocalGuard<'_>,
)
pub unsafe fn assign_ksuf( &self, slot: usize, suffix: &[u8], guard: &LocalGuard<'_>, )
Assign a suffix to a slot (two-tier: inline first, then external).
This uses the C++ Masstree optimization: try inline storage first, only allocate external storage when inline is full.
§Safety
- Caller must hold lock and have called mark_insert()
- guard must come from this tree’s collector
Sourcepub unsafe fn assign_ksuf_init(
&self,
slot: usize,
suffix: &[u8],
guard: &LocalGuard<'_>,
)
pub unsafe fn assign_ksuf_init( &self, slot: usize, suffix: &[u8], guard: &LocalGuard<'_>, )
Assign a suffix during node initialization (e.g., splits).
Unlike assign_ksuf, this assumes slots 0..slot are already filled
sequentially and doesn’t rely on the permutation.
§Safety
- Caller must hold lock or node must be unpublished
- guard must come from this tree’s collector
- Slots 0..slot must already be filled sequentially
Sourcepub unsafe fn try_assign_ksuf(
&self,
slot: usize,
suffix: &[u8],
guard: &LocalGuard<'_>,
) -> AllocResult<()>
pub unsafe fn try_assign_ksuf( &self, slot: usize, suffix: &[u8], guard: &LocalGuard<'_>, ) -> AllocResult<()>
Try to assign a suffix to a slot, returning error on allocation failure.
This is the fallible version of assign_ksuf. It tries inline storage
first, then falls back to external storage, returning an error if
allocation fails.
§Safety
- Caller must hold lock and have called mark_insert()
- guard must come from this tree’s collector
§Errors
Returns Err(AllocError) if external bag allocation fails.
Sourcepub unsafe fn try_assign_ksuf_init(
&self,
slot: usize,
suffix: &[u8],
guard: &LocalGuard<'_>,
) -> AllocResult<()>
pub unsafe fn try_assign_ksuf_init( &self, slot: usize, suffix: &[u8], guard: &LocalGuard<'_>, ) -> AllocResult<()>
Sourcepub unsafe fn clear_ksuf(&self, slot: usize, _guard: &LocalGuard<'_>)
pub unsafe fn clear_ksuf(&self, slot: usize, _guard: &LocalGuard<'_>)
Clear the suffix from a slot (no allocation needed!).
Unlike the old copy-on-write approach, this just marks the slot as empty in both inline and external storage. No cloning required.
§Safety
- Caller must hold lock and have called mark_insert()
- guard must come from this tree’s collector (unused but kept for API compat)
Sourcepub fn ksuf_equals(&self, slot: usize, suffix: &[u8]) -> bool
pub fn ksuf_equals(&self, slot: usize, suffix: &[u8]) -> bool
Check if a slot’s suffix equals the given suffix.
Sourcepub fn ksuf_compare(&self, slot: usize, suffix: &[u8]) -> Option<Ordering>
pub fn ksuf_compare(&self, slot: usize, suffix: &[u8]) -> Option<Ordering>
Compare a slot’s suffix with the given suffix.
Sourcepub fn ksuf_matches(&self, slot: usize, ikey: u64, suffix: &[u8]) -> bool
pub fn ksuf_matches(&self, slot: usize, ikey: u64, suffix: &[u8]) -> bool
Check if a slot’s key matches the given key.
Sourcepub fn ksuf_match_result(&self, slot: usize, keylenx: u8, suffix: &[u8]) -> i32
pub fn ksuf_match_result(&self, slot: usize, keylenx: u8, suffix: &[u8]) -> i32
Match result for layer-aware key comparison.
Returns:
MATCH_RESULT_EXACT (1) - Exact match
MATCH_RESULT_MISMATCH (0) - Same ikey but different key
MATCH_RESULT_LAYER (-8) - Slot is a layer pointer
Sourcepub unsafe fn compact_ksuf(
&self,
exclude_slot: Option<usize>,
guard: &LocalGuard<'_>,
) -> usize
pub unsafe fn compact_ksuf( &self, exclude_slot: Option<usize>, guard: &LocalGuard<'_>, ) -> usize
Compact external suffix storage.
Note: Inline storage doesn’t need compaction (fixed size, no fragmentation). This only compacts the external bag if it exists.
§Safety
- Caller must hold lock
- The guard must be valid and from the same collector as the tree.
Sourcepub fn leaf_value_ptr(&self, slot: usize) -> *mut u8
pub fn leaf_value_ptr(&self, slot: usize) -> *mut u8
Load leaf value pointer at the given slot.
Sourcepub fn set_leaf_value_ptr(&self, slot: usize, ptr: *mut u8)
pub fn set_leaf_value_ptr(&self, slot: usize, ptr: *mut u8)
Store leaf value pointer at the given slot.
Sourcepub fn take_leaf_value_ptr(&self, slot: usize) -> *mut u8
pub fn take_leaf_value_ptr(&self, slot: usize) -> *mut u8
Take the leaf value pointer, leaving null in the slot.
Sourcepub fn is_slot_empty(&self, slot: usize) -> bool
pub fn is_slot_empty(&self, slot: usize) -> bool
Check if a slot is empty (value pointer is null).
Sourcepub fn permutation(&self) -> Permuter24
pub fn permutation(&self) -> Permuter24
Load permutation with Acquire ordering.
Sourcepub fn set_permutation(&self, perm: Permuter24)
pub fn set_permutation(&self, perm: Permuter24)
Store permutation with Release ordering.
Sourcepub fn permutation_raw(&self) -> u128
pub fn permutation_raw(&self) -> u128
Get raw permutation value (for debugging).
Sourcepub fn cas_slot_value(
&self,
slot: usize,
expected: *mut u8,
new_value: *mut u8,
) -> Result<(), *mut u8>
pub fn cas_slot_value( &self, slot: usize, expected: *mut u8, new_value: *mut u8, ) -> Result<(), *mut u8>
Sourcepub fn load_slot_value(&self, slot: usize) -> *mut u8
pub fn load_slot_value(&self, slot: usize) -> *mut u8
Load the current value pointer at a slot.
Sourcepub unsafe fn store_key_data_for_cas(&self, slot: usize, ikey: u64, keylenx: u8)
pub unsafe fn store_key_data_for_cas(&self, slot: usize, ikey: u64, keylenx: u8)
Store key metadata (ikey, keylenx) for a CAS insert attempt.
§Safety
- The caller must have successfully claimed the slot via cas_slot_value and ensured the slot still belongs to the CAS attempt (i.e. leaf_values[slot] still equals the claimed pointer).
Note: writing key metadata before claiming the slot is not safe in this design because multiple concurrent CAS attempts can overwrite each other’s metadata before publish.
Sourcepub fn safe_next(&self, guard: &impl Guard) -> *mut Self
pub fn safe_next(&self, guard: &impl Guard) -> *mut Self
Get the next leaf pointer, masking the mark bit.
Uses guard protection to ensure the load participates in seize’s total order, making it safe on all architectures.
Sourcepub unsafe fn safe_next_unguarded(&self) -> *mut Self
pub unsafe fn safe_next_unguarded(&self) -> *mut Self
Get the next leaf pointer without guard protection.
§Safety
Caller must ensure the next pointer’s target won’t be retired during use. Valid when:
- Called during Drop (no concurrent access)
- Called in teardown after reclaim_all()
- Caller holds locks that prevent retirement
Sourcepub fn next_raw(&self, guard: &impl Guard) -> *mut Self
pub fn next_raw(&self, guard: &impl Guard) -> *mut Self
Get the raw next pointer (including mark bit).
Uses guard protection to ensure the load participates in seize’s total order, making it safe on all architectures.
Sourcepub unsafe fn next_raw_unguarded(&self) -> *mut Self
pub unsafe fn next_raw_unguarded(&self) -> *mut Self
Get the raw next pointer without guard protection.
§Safety
Caller must ensure the next pointer’s target won’t be retired during use.
Sourcepub fn next_is_marked(&self) -> bool
pub fn next_is_marked(&self) -> bool
Check if the next pointer is marked (split in progress).
Sourcepub fn unmark_next(&self)
pub fn unmark_next(&self)
Unmark the next pointer.
§Safety Note
Uses unguarded load internally since we’re modifying our own field during a locked operation, not traversing to a different node.
Sourcepub fn wait_for_split(&self)
pub fn wait_for_split(&self)
Wait for an in-progress split to complete.
Spins until the next pointer is unmarked, the version is stable, OR the node is marked as deleted.
§Note
A marked next pointer can mean either:
- A split is in progress (will be unmarked when split completes)
- An unlink is in progress (leaf being deleted, may stay marked)
We check is_deleted() to avoid spinning forever on case 2.
Sourcepub unsafe fn unlink_from_chain(&self)
pub unsafe fn unlink_from_chain(&self)
Unlink this leaf from the B-link doubly-linked chain.
This is the inverse of link_sibling. Used when removing
an empty leaf from the tree.
§Algorithm (from C++ btree_leaflink.hh:76-96)
- Lock our next pointer via CAS marking
- CAS prev->next from self to marked(self) to signal unlinking
- Update next->prev = prev
- Release fence for visibility
- Store prev->next = next (unmarked), completing the unlink
§Preconditions
- Self is locked (caller holds version lock)
- Self has a predecessor (prev is non-null)
§Safety
- Caller must hold the version lock on this leaf
- self.prev() must be non-null (not the leftmost leaf)
- The prev and next pointers must be valid leaves
Sourcepub fn prev(&self, guard: &impl Guard) -> *mut Self
pub fn prev(&self, guard: &impl Guard) -> *mut Self
Get the previous leaf pointer.
Uses guard protection to ensure the load participates in seize’s total order, making it safe on all architectures.
Sourcepub unsafe fn prev_unguarded(&self) -> *mut Self
pub unsafe fn prev_unguarded(&self) -> *mut Self
Get the previous leaf pointer without guard protection.
§Safety
Caller must ensure the prev pointer’s target won’t be retired during use.
Sourcepub fn parent(&self, guard: &impl Guard) -> *mut u8
pub fn parent(&self, guard: &impl Guard) -> *mut u8
Get the parent pointer.
Uses guard protection to ensure the load participates in seize’s total order, making it safe on all architectures.
Sourcepub unsafe fn parent_unguarded(&self) -> *mut u8
pub unsafe fn parent_unguarded(&self) -> *mut u8
Get the parent pointer without guard protection.
§Safety
Caller must ensure the parent pointer’s target won’t be retired during use.
Sourcepub fn set_parent(&self, parent: *mut u8)
pub fn set_parent(&self, parent: *mut u8)
Set the parent pointer.
Sourcepub fn modstate(&self) -> u8
pub fn modstate(&self) -> u8
Get the modification state.
Returns one of:
MODSTATE_INSERT (0): Normal insert mode
MODSTATE_REMOVE (1): Node is being removed
MODSTATE_DELETED_LAYER (2): Layer has been garbage collected
Sourcepub fn set_modstate(&self, state: u8)
pub fn set_modstate(&self, state: u8)
Set the modification state.
Sourcepub fn deleted_layer(&self) -> bool
pub fn deleted_layer(&self) -> bool
Check if this layer has been deleted (garbage collected).
This is distinct from version.is_deleted():
is_deleted() means the node itself is removed from the tree
deleted_layer() means the sublayer this node was root of has been gc’d
When deleted_layer() is true, readers should reset their key position
(unshift_all) and retry from the main tree root.
§C++ Reference
Matches leaf::deleted_layer() in masstree_struct.hh:456-458.
Sourcepub fn mark_deleted_layer(&self)
pub fn mark_deleted_layer(&self)
Mark this layer as deleted (for gc_layer).
Called when garbage collecting an empty sublayer. The parent’s slot that pointed to this sublayer will be cleared, and this leaf is marked so concurrent readers know to retry from the tree root.
§C++ Reference
Matches setting modstate_ = modstate_deleted_layer in C++.
Sourcepub fn mark_remove(&self)
pub fn mark_remove(&self)
Mark this node as being in remove mode.
Called at the start of a remove operation to prevent suffix allocation during the remove process.
§C++ Reference
Matches the modstate transition in finish_remove (masstree_remove.hh:162-166).
Sourcepub fn is_removing(&self) -> bool
pub fn is_removing(&self) -> bool
Check if this node is in remove mode.
Sourcepub fn is_empty_state(&self) -> bool
pub fn is_empty_state(&self) -> bool
Check if this leaf is in empty state (modstate == MODSTATE_EMPTY).
Empty state means the leaf had all its keys removed and is available for reuse by insert or cleanup by the coalescing background task.
Sourcepub fn mark_empty(&self)
pub fn mark_empty(&self)
Mark this leaf as empty (all keys removed).
Called when the last key is removed from a leaf. The leaf remains in the tree structure but is marked for potential reuse or cleanup.
Empty leaves can be:
- Reused by insert operations (saves allocation)
- Cleaned up by background coalescing task
Sourcepub fn clear_empty_state(&self)
pub fn clear_empty_state(&self)
Clear empty state, returning to normal insert mode.
Called when an empty leaf is being reused for a new insert. This resets the modstate to allow normal operation.
Sourcepub fn can_reuse_slot0(&self, new_ikey: u64) -> bool
pub fn can_reuse_slot0(&self, new_ikey: u64) -> bool
Check if slot 0 can be reused for a new key.
§Safety
Called under exclusive lock - uses unguarded prev load.
Sourcepub fn clear_slot(&self, slot: usize)
pub fn clear_slot(&self, slot: usize)
Clear a slot completely, removing any value or layer pointer.
This is used by gc_layer when cleaning up an empty sublayer.
The parent leaf’s slot that pointed to the sublayer is cleared.
§Memory Ordering
Uses Release ordering to ensure the clear is visible to subsequent readers. The permutation should be updated separately to remove this slot from the logical ordering.
§Safety
The caller must ensure:
- The leaf is locked
- The slot is valid (0..WIDTH)
- Any value/layer at this slot has been or will be properly retired
Sourcepub fn clear_slot_and_permutation(&self, slot: usize)
pub fn clear_slot_and_permutation(&self, slot: usize)
Clear a slot and update permutation atomically.
This is a convenience method that:
- Clears the slot contents
- Removes the slot from the permutation
§Safety
The caller must ensure the leaf is locked.
Trait Implementations§
Source§impl<S: ValueSlot> Debug for LeafNode24<S>
impl<S: ValueSlot> Debug for LeafNode24<S>
Source§impl<S: ValueSlot> Drop for LeafNode24<S>
impl<S: ValueSlot> Drop for LeafNode24<S>
Source§impl<V: Send + Sync + 'static> LayerCapableLeaf<LeafValue<V>> for LeafNode24<LeafValue<V>>
impl<V: Send + Sync + 'static> LayerCapableLeaf<LeafValue<V>> for LeafNode24<LeafValue<V>>
Source§impl<V: Copy + Send + Sync + 'static> LayerCapableLeaf<LeafValueIndex<V>> for LeafNode24<LeafValueIndex<V>>
impl<V: Copy + Send + Sync + 'static> LayerCapableLeaf<LeafValueIndex<V>> for LeafNode24<LeafValueIndex<V>>
Source§fn try_clone_output(&self, slot: usize) -> Option<V>
fn try_clone_output(&self, slot: usize) -> Option<V>
Source§unsafe fn assign_from_key_arc(
&self,
slot: usize,
key: &Key<'_>,
value: Option<V>,
guard: &LocalGuard<'_>,
)
unsafe fn assign_from_key_arc( &self, slot: usize, key: &Key<'_>, value: Option<V>, guard: &LocalGuard<'_>, )
Source§impl<S: ValueSlot> LeafValueClear<S> for LeafNode24<S>
impl<S: ValueSlot> LeafValueClear<S> for LeafNode24<S>
Source§fn clear_value_output(&self, slot: usize, guard: &LocalGuard<'_>)
fn clear_value_output(&self, slot: usize, guard: &LocalGuard<'_>)
slot. Read moreSource§impl<S: ValueSlot> LeafValueLoad<S> for LeafNode24<S>
impl<S: ValueSlot> LeafValueLoad<S> for LeafNode24<S>
Source§impl<S: ValueSlot> LeafValueStore<S> for LeafNode24<S>
impl<S: ValueSlot> LeafValueStore<S> for LeafNode24<S>
Source§fn store_value_output(
&self,
slot: usize,
output: &<S as ValueSlot>::Output,
_guard: &LocalGuard<'_>,
)
fn store_value_output( &self, slot: usize, output: &<S as ValueSlot>::Output, _guard: &LocalGuard<'_>, )
slot. Read moreSource§impl<S: ValueSlot> LeafValueTake<S> for LeafNode24<S>
impl<S: ValueSlot> LeafValueTake<S> for LeafNode24<S>
Source§fn take_value_output(
&self,
slot: usize,
guard: &LocalGuard<'_>,
) -> Option<<S as ValueSlot>::Output>
fn take_value_output( &self, slot: usize, guard: &LocalGuard<'_>, ) -> Option<<S as ValueSlot>::Output>
slot, returning the old output. Read moreSource§impl<S: ValueSlot> LeafValueUpdate<S> for LeafNode24<S>
impl<S: ValueSlot> LeafValueUpdate<S> for LeafNode24<S>
Source§fn replace_value_output(
&self,
slot: usize,
new_output: <S as ValueSlot>::Output,
guard: &LocalGuard<'_>,
) -> <S as ValueSlot>::Output
fn replace_value_output( &self, slot: usize, new_output: <S as ValueSlot>::Output, guard: &LocalGuard<'_>, ) -> <S as ValueSlot>::Output
slot, returning the old output. Read moreSource§impl<S> NodeAllocatorGeneric<S, LeafNode24<S>> for SeizeAllocator24<S>
impl<S> NodeAllocatorGeneric<S, LeafNode24<S>> for SeizeAllocator24<S>
Source§fn alloc_leaf_direct(
&self,
is_root: bool,
is_layer_root: bool,
) -> *mut LeafNode24<S>
fn alloc_leaf_direct( &self, is_root: bool, is_layer_root: bool, ) -> *mut LeafNode24<S>
Allocate a leaf directly without Box intermediate.
Uses raw allocation + init_at to avoid stack-to-heap copy.
Source§fn try_alloc_leaf(
&self,
is_root: bool,
is_layer_root: bool,
) -> AllocResult<*mut LeafNode24<S>>
fn try_alloc_leaf( &self, is_root: bool, is_layer_root: bool, ) -> AllocResult<*mut LeafNode24<S>>
Try to allocate a leaf node, returning an error on failure.
Source§fn alloc_internode_direct(&self, height: u32) -> *mut u8
fn alloc_internode_direct(&self, height: u32) -> *mut u8
Allocate an internode directly without Box intermediate.
Source§fn alloc_internode_direct_root(&self, height: u32) -> *mut u8
fn alloc_internode_direct_root(&self, height: u32) -> *mut u8
Allocate an internode as root directly without Box intermediate.
Source§fn alloc_internode_direct_for_split(
&self,
parent_version: &NodeVersion,
height: u32,
) -> *mut u8
fn alloc_internode_direct_for_split( &self, parent_version: &NodeVersion, height: u32, ) -> *mut u8
Allocate an internode for split directly without Box intermediate.
Source§fn alloc_leaf(&self, node: Box<LeafNode24<S>>) -> *mut LeafNode24<S>
fn alloc_leaf(&self, node: Box<LeafNode24<S>>) -> *mut LeafNode24<S>
Source§fn track_leaf(&self, _ptr: *mut LeafNode24<S>)
fn track_leaf(&self, _ptr: *mut LeafNode24<S>)
Source§unsafe fn retire_leaf(&self, ptr: *mut LeafNode24<S>, guard: &LocalGuard<'_>)
unsafe fn retire_leaf(&self, ptr: *mut LeafNode24<S>, guard: &LocalGuard<'_>)
Source§fn alloc_internode_erased(&self, node_ptr: *mut u8) -> *mut u8
fn alloc_internode_erased(&self, node_ptr: *mut u8) -> *mut u8
Source§fn track_internode_erased(&self, _ptr: *mut u8)
fn track_internode_erased(&self, _ptr: *mut u8)
Source§unsafe fn retire_internode_erased(&self, ptr: *mut u8, guard: &LocalGuard<'_>)
unsafe fn retire_internode_erased(&self, ptr: *mut u8, guard: &LocalGuard<'_>)
Source§fn teardown_tree(&self, root_ptr: *mut u8)
fn teardown_tree(&self, root_ptr: *mut u8)
Source§unsafe fn retire_subtree_root(&self, root_ptr: *mut u8, guard: &LocalGuard<'_>)
unsafe fn retire_subtree_root(&self, root_ptr: *mut u8, guard: &LocalGuard<'_>)
root_ptr. Read moreSource§impl<S: ValueSlot + Send + Sync + 'static> TreeLeafNode<S> for LeafNode24<S>
impl<S: ValueSlot + Send + Sync + 'static> TreeLeafNode<S> for LeafNode24<S>
Source§const SPLIT_THRESHOLD: usize = 19usize
const SPLIT_THRESHOLD: usize = 19usize
80% of 24 = 19.2, use 19 to trigger splits earlier
Source§fn find_ikey_matches(&self, target_ikey: u64) -> u32
fn find_ikey_matches(&self, target_ikey: u64) -> u32
SIMD-accelerated ikey matching for WIDTH=24.
Uses load_all_ikeys() + SIMD comparison instead of
sequential per-slot atomic loads.
Source§fn safe_next(&self) -> *mut Self
fn safe_next(&self) -> *mut Self
§Safety
Trait methods use unguarded loads - intended for locked operations.
Source§fn prev(&self) -> *mut Self
fn prev(&self) -> *mut Self
§Safety
Trait methods use unguarded loads - intended for locked operations.
Source§fn parent(&self) -> *mut u8
fn parent(&self) -> *mut u8
§Safety
Trait methods use unguarded loads - intended for locked operations.
Source§fn next_raw(&self) -> *mut Self
fn next_raw(&self) -> *mut Self
§Safety
Trait methods use unguarded loads - intended for locked operations.
Source§type Perm = Permuter24
type Perm = Permuter24
Source§type Internode = InternodeNode
type Internode = InternodeNode
Source§fn new_root_boxed() -> Box<Self>
fn new_root_boxed() -> Box<Self>
Source§fn new_layer_root_boxed() -> Box<Self>
fn new_layer_root_boxed() -> Box<Self>
Source§fn version(&self) -> &NodeVersion
fn version(&self) -> &NodeVersion
Source§fn permutation(&self) -> Permuter24
fn permutation(&self) -> Permuter24
Source§fn set_permutation(&self, perm: Permuter24)
fn set_permutation(&self, perm: Permuter24)
Source§fn permutation_raw(&self) -> u128
fn permutation_raw(&self) -> u128
Source§fn ikey(&self, slot: usize) -> u64
fn ikey(&self, slot: usize) -> u64
Source§fn ikey_relaxed(&self, slot: usize) -> u64
fn ikey_relaxed(&self, slot: usize) -> u64
Source§fn ikey_bound(&self) -> u64
fn ikey_bound(&self) -> u64
Source§fn set_keylenx(&self, slot: usize, keylenx: u8)
fn set_keylenx(&self, slot: usize, keylenx: u8)
Source§fn cas_slot_value(
&self,
slot: usize,
expected: *mut u8,
new_value: *mut u8,
) -> Result<(), *mut u8>
fn cas_slot_value( &self, slot: usize, expected: *mut u8, new_value: *mut u8, ) -> Result<(), *mut u8>
Source§fn clear_slot(&self, slot: usize)
fn clear_slot(&self, slot: usize)
Source§fn clear_slot_and_permutation(&self, slot: usize)
fn clear_slot_and_permutation(&self, slot: usize)
Source§fn next_is_marked(&self) -> bool
fn next_is_marked(&self) -> bool
Source§fn unmark_next(&self)
fn unmark_next(&self)
Source§unsafe fn unlink_from_chain(&self)
unsafe fn unlink_from_chain(&self)
Source§fn set_parent(&self, parent: *mut u8)
fn set_parent(&self, parent: *mut u8)
Source§fn can_reuse_slot0(&self, new_ikey: u64) -> bool
fn can_reuse_slot0(&self, new_ikey: u64) -> bool
Source§fn load_slot_value(&self, slot: usize) -> *mut u8
fn load_slot_value(&self, slot: usize) -> *mut u8
Source§fn wait_for_split(&self)
fn wait_for_split(&self)
Source§fn calculate_split_point(
&self,
_insert_pos: usize,
insert_ikey: u64,
) -> Option<SplitPoint>
fn calculate_split_point( &self, _insert_pos: usize, insert_ikey: u64, ) -> Option<SplitPoint>
Source§unsafe fn split_into_preallocated(
&self,
split_pos: usize,
new_leaf_ptr: *mut Self,
guard: &LocalGuard<'_>,
) -> (u64, InsertTarget)
unsafe fn split_into_preallocated( &self, split_pos: usize, new_leaf_ptr: *mut Self, guard: &LocalGuard<'_>, ) -> (u64, InsertTarget)
split_pos using a pre-allocated target. Read moreSource§unsafe fn split_all_to_right_preallocated(
&self,
new_leaf_ptr: *mut Self,
guard: &LocalGuard<'_>,
) -> (u64, InsertTarget)
unsafe fn split_all_to_right_preallocated( &self, new_leaf_ptr: *mut Self, guard: &LocalGuard<'_>, ) -> (u64, InsertTarget)
Source§unsafe fn link_sibling(&self, new_sibling: *mut Self)
unsafe fn link_sibling(&self, new_sibling: *mut Self)
Source§unsafe fn assign_ksuf(&self, slot: usize, suffix: &[u8], guard: &LocalGuard<'_>)
unsafe fn assign_ksuf(&self, slot: usize, suffix: &[u8], guard: &LocalGuard<'_>)
Source§unsafe fn assign_ksuf_init(
&self,
slot: usize,
suffix: &[u8],
guard: &LocalGuard<'_>,
)
unsafe fn assign_ksuf_init( &self, slot: usize, suffix: &[u8], guard: &LocalGuard<'_>, )
Source§unsafe fn clear_ksuf(&self, slot: usize, guard: &LocalGuard<'_>)
unsafe fn clear_ksuf(&self, slot: usize, guard: &LocalGuard<'_>)
Source§fn take_leaf_value_ptr(&self, slot: usize) -> *mut u8
fn take_leaf_value_ptr(&self, slot: usize) -> *mut u8
Source§fn ksuf_equals(&self, slot: usize, suffix: &[u8]) -> bool
fn ksuf_equals(&self, slot: usize, suffix: &[u8]) -> bool
Source§fn ksuf_compare(&self, slot: usize, suffix: &[u8]) -> Option<Ordering>
fn ksuf_compare(&self, slot: usize, suffix: &[u8]) -> Option<Ordering>
Source§fn ksuf_or_empty(&self, slot: usize) -> &[u8] ⓘ
fn ksuf_or_empty(&self, slot: usize) -> &[u8] ⓘ
Source§fn ksuf_matches(&self, slot: usize, ikey: u64, suffix: &[u8]) -> bool
fn ksuf_matches(&self, slot: usize, ikey: u64, suffix: &[u8]) -> bool
Source§fn ksuf_match_result(&self, slot: usize, keylenx: u8, suffix: &[u8]) -> i32
fn ksuf_match_result(&self, slot: usize, keylenx: u8, suffix: &[u8]) -> i32
Source§fn prefetch_ikey(&self, slot: usize)
fn prefetch_ikey(&self, slot: usize)
Source§fn set_modstate(&self, state: u8)
fn set_modstate(&self, state: u8)
Source§fn deleted_layer(&self) -> bool
fn deleted_layer(&self) -> bool
Source§fn mark_deleted_layer(&self)
fn mark_deleted_layer(&self)
gc_layer). Read moreSource§fn mark_remove(&self)
fn mark_remove(&self)
Source§fn is_removing(&self) -> bool
fn is_removing(&self) -> bool
Source§fn is_empty_state(&self) -> bool
fn is_empty_state(&self) -> bool
MODSTATE_EMPTY). Read more