//! masstree/internode/accessors.rs — accessor methods for `InternodeNode`.
use std::array as StdArray;
use std::sync::atomic::{Ordering as AtomicOrdering, fence};

use seize::Guard;

use crate::{
    internode::{InternodeNode, WIDTH},
    nodeversion::NodeVersion,
    ordering::{READ_ORD, RELAXED, WRITE_ORD},
    prefetch::prefetch_read,
};
12
13impl InternodeNode {
14    // ========================================================================
15    //  Version Accessors
16    // ========================================================================
17
18    /// Get a reference to the node's version.
19    #[must_use]
20    #[inline(always)]
21    pub const fn version(&self) -> &NodeVersion {
22        &self.version
23    }
24
25    /// Get a mutable reference to the node's version.
26    #[inline(always)]
27    pub const fn version_mut(&mut self) -> &mut NodeVersion {
28        &mut self.version
29    }
30
31    // ========================================================================
32    //  Key Accessors
33    // ========================================================================
34
35    /// Get the number of keys in this internode.
36    #[must_use]
37    #[inline(always)]
38    pub fn nkeys(&self) -> usize {
39        self.nkeys.load(READ_ORD) as usize
40    }
41
42    /// Alias for [`Self::nkeys`] for API consistency with leaf nodes.
43    #[must_use]
44    #[inline(always)]
45    pub fn size(&self) -> usize {
46        self.nkeys()
47    }
48
49    /// Check if the internode has no keys.
50    #[must_use]
51    #[inline(always)]
52    pub fn is_empty(&self) -> bool {
53        self.nkeys() == 0
54    }
55
56    /// Check if the internode is full.
57    #[must_use]
58    #[inline(always)]
59    pub fn is_full(&self) -> bool {
60        self.nkeys() >= WIDTH
61    }
62
63    /// Get the number of keys using Relaxed ordering.
64    #[must_use]
65    #[inline(always)]
66    pub(crate) fn nkeys_relaxed(&self) -> usize {
67        self.nkeys.load(RELAXED) as usize
68    }
69
70    /// Get the key at the given index.
71    ///
72    /// # Panics
73    /// Panics in debug mode if `i >= WIDTH`.
74    #[must_use]
75    #[inline(always)]
76    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
77    pub fn ikey(&self, i: usize) -> u64 {
78        debug_assert!(i < WIDTH, "ikey: index {i} out of bounds (WIDTH={WIDTH})");
79        self.ikey0[i].load(READ_ORD)
80    }
81
82    /// Batch load all ikeys into a local array.
83    #[must_use]
84    #[inline(always)]
85    #[expect(clippy::indexing_slicing, reason = "i < WIDTH guaranteed by from_fn")]
86    pub fn load_all_ikeys(&self) -> [u64; WIDTH] {
87        let ikeys: [u64; WIDTH] = StdArray::from_fn(|i| self.ikey0[i].load(RELAXED));
88
89        fence(AtomicOrdering::Acquire);
90
91        ikeys
92    }
93
94    /// Set the key at the given index.
95    ///
96    /// # Panics
97    /// Panics in debug mode if `i >= WIDTH`.
98    #[inline(always)]
99    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
100    pub fn set_ikey(&self, i: usize, ikey: u64) {
101        debug_assert!(i < WIDTH, "set_ikey: index {i} out of bounds");
102        self.ikey0[i].store(ikey, WRITE_ORD);
103    }
104
105    /// Set key using Relaxed ordering (for internal shifting during insert).
106    #[inline(always)]
107    #[expect(clippy::indexing_slicing, reason = "bounds checked by caller")]
108    pub(super) fn set_ikey_relaxed(&self, i: usize, ikey: u64) {
109        self.ikey0[i].store(ikey, RELAXED);
110    }
111
112    /// Get the tree height.
113    #[must_use]
114    #[inline(always)]
115    pub const fn height(&self) -> u32 {
116        self.height as u32
117    }
118
119    /// Check if children are leaves (height == 0).
120    #[must_use]
121    #[inline(always)]
122    pub const fn children_are_leaves(&self) -> bool {
123        self.height == 0
124    }
125
126    // ========================================================================
127    //  Child Accessors
128    // ========================================================================
129
130    /// Get the child pointer at the given index.
131    ///
132    /// # Panics
133    /// Panics in debug mode if `i > WIDTH`.
134    #[must_use]
135    #[inline(always)]
136    #[expect(
137        clippy::indexing_slicing,
138        reason = "bounds checked via debug_assert; i <= WIDTH (16 children)"
139    )]
140    pub fn child(&self, i: usize, guard: &impl Guard) -> *mut u8 {
141        debug_assert!(i <= WIDTH, "child: index {i} out of bounds");
142        guard.protect(&self.child[i], READ_ORD)
143    }
144
145    /// Get the child pointer without guard protection.
146    ///
147    /// # Safety
148    ///
149    /// Caller must ensure the child pointer's target won't be retired during use.
150    /// Valid when:
151    /// - Called during `Drop` (no concurrent access)
152    /// - Called in teardown after `reclaim_all()`
153    #[must_use]
154    #[inline(always)]
155    #[expect(
156        clippy::indexing_slicing,
157        reason = "bounds checked via debug_assert; i <= WIDTH (16 children)"
158    )]
159    pub unsafe fn child_unguarded(&self, i: usize) -> *mut u8 {
160        debug_assert!(i <= WIDTH, "child_unguarded: index {i} out of bounds");
161        self.child[i].load(READ_ORD)
162    }
163
164    /// Get the child pointer with prefetch hint for the next likely child.
165    #[must_use]
166    #[inline]
167    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
168    pub fn child_with_prefetch(&self, i: usize, nkeys: usize, guard: &impl Guard) -> *mut u8 {
169        debug_assert!(i <= WIDTH, "child_with_prefetch: index out of bounds");
170
171        let ptr = guard.protect(&self.child[i], READ_ORD);
172
173        if i < nkeys {
174            let next_child_ptr = self.child[i + 1].load(RELAXED);
175
176            prefetch_read(next_child_ptr);
177            prefetch_read(next_child_ptr.wrapping_add(64));
178        }
179
180        ptr
181    }
182
183    /// Get the child pointer with depth-first prefetch for tree descent.
184    #[must_use]
185    #[inline]
186    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
187    pub fn child_with_depth_prefetch(&self, i: usize, guard: &impl Guard) -> *mut u8 {
188        debug_assert!(i <= WIDTH, "child_with_depth_prefetch: index out of bounds");
189
190        // Protected load for the child we're actually returning
191        let ptr: *mut u8 = guard.protect(&self.child[i], READ_ORD);
192
193        // Prefetch the target child's header and key array
194        Self::prefetch_child_internal(ptr);
195
196        ptr
197    }
198
199    /// Get the child pointer with full prefetch for tree descent.
200    #[must_use]
201    #[inline]
202    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
203    pub fn child_with_full_prefetch(&self, i: usize, guard: &impl Guard) -> *mut u8 {
204        debug_assert!(i <= WIDTH, "child_with_full_prefetch: index out of bounds");
205
206        let ptr: *mut u8 = guard.protect(&self.child[i], READ_ORD);
207
208        Self::prefetch_child_full(ptr);
209
210        ptr
211    }
212
213    /// Prefetch a child node's header and first two key cache lines.
214    #[inline(always)]
215    pub(super) fn prefetch_child_internal(ptr: *mut u8) {
216        prefetch_read(ptr);
217        prefetch_read(ptr.wrapping_add(64));
218    }
219
220    /// Prefetch a child node's header and all three key cache lines.
221    #[inline(always)]
222    pub(super) fn prefetch_child_full(ptr: *mut u8) {
223        prefetch_read(ptr);
224        prefetch_read(ptr.wrapping_add(64));
225        prefetch_read(ptr.wrapping_add(128));
226    }
227
228    /// Set the child pointer at the given index.
229    ///
230    /// # Panics
231    /// Panics in debug mode if `i > WIDTH`.
232    #[inline(always)]
233    #[expect(
234        clippy::indexing_slicing,
235        reason = "bounds checked via debug_assert; i <= WIDTH (16 children)"
236    )]
237    pub fn set_child(&self, i: usize, child: *mut u8) {
238        debug_assert!(i <= WIDTH, "set_child: index {i} out of bounds");
239        self.child[i].store(child, WRITE_ORD);
240    }
241
242    /// Assign a key and its right child at position `p`.
243    ///
244    /// # Panics
245    /// Panics in debug mode if `p >= WIDTH`.
246    #[inline(always)]
247    pub fn assign(&self, p: usize, ikey: u64, right_child: *mut u8) {
248        debug_assert!(p < WIDTH, "assign: position {p} out of bounds");
249
250        self.set_ikey(p, ikey);
251        self.set_child(p + 1, right_child);
252    }
253
254    /// Set the number of keys.
255    ///
256    /// # Panics
257    /// Panics in debug mode if `n > WIDTH`.
258    #[inline(always)]
259    pub fn set_nkeys(&self, n: u8) {
260        debug_assert!((n as usize) <= WIDTH, "set_nkeys: count {n} out of bounds");
261        self.nkeys.store(n, WRITE_ORD);
262    }
263
264    /// Increment the number of keys by 1.
265    ///
266    /// # Panics
267    /// Panics in debug mode if already at WIDTH.
268    #[inline(always)]
269    pub fn inc_nkeys(&self) {
270        let current: u8 = self.nkeys.load(RELAXED);
271        debug_assert!((current as usize) < WIDTH, "inc_nkeys: would overflow");
272        self.nkeys.store(current.wrapping_add(1), WRITE_ORD);
273    }
274}