masstree/internode/
accessors.rs1use std::array as StdArray;
2use std::sync::atomic::{Ordering as AtomicOrdering, fence};
3
4use seize::Guard;
5
6use crate::{
7 internode::{InternodeNode, WIDTH},
8 nodeversion::NodeVersion,
9 ordering::{READ_ORD, RELAXED, WRITE_ORD},
10 prefetch::prefetch_read,
11};
12
impl InternodeNode {
    /// Returns a shared reference to this node's version word (used for
    /// optimistic concurrency control on reads).
    #[must_use]
    #[inline(always)]
    pub const fn version(&self) -> &NodeVersion {
        &self.version
    }

    /// Returns a mutable reference to the version word.
    ///
    /// Requires `&mut self`, so the caller already has exclusive access.
    #[inline(always)]
    pub const fn version_mut(&mut self) -> &mut NodeVersion {
        &mut self.version
    }

    /// Number of keys currently stored in this node, loaded with `READ_ORD`.
    #[must_use]
    #[inline(always)]
    pub fn nkeys(&self) -> usize {
        self.nkeys.load(READ_ORD) as usize
    }

    /// Alias for [`Self::nkeys`]; provided for a container-like API surface.
    #[must_use]
    #[inline(always)]
    pub fn size(&self) -> usize {
        self.nkeys()
    }

    /// `true` when the node holds no keys.
    #[must_use]
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.nkeys() == 0
    }

    /// `true` when the node holds `WIDTH` (or, defensively, more) keys and
    /// cannot accept another without splitting.
    #[must_use]
    #[inline(always)]
    pub fn is_full(&self) -> bool {
        self.nkeys() >= WIDTH
    }

    /// Key count loaded with `Relaxed` ordering — for callers that already
    /// have their own synchronization (e.g. hold the node lock) and do not
    /// need the acquire semantics of [`Self::nkeys`].
    #[must_use]
    #[inline(always)]
    pub(crate) fn nkeys_relaxed(&self) -> usize {
        self.nkeys.load(RELAXED) as usize
    }

    /// Loads the `i`-th separator key slot (`i < WIDTH`) with `READ_ORD`.
    ///
    /// Only `debug_assert`-checked; in release builds an out-of-range `i`
    /// would index past the slot array.
    #[must_use]
    #[inline(always)]
    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
    pub fn ikey(&self, i: usize) -> u64 {
        debug_assert!(i < WIDTH, "ikey: index {i} out of bounds (WIDTH={WIDTH})");
        self.ikey0[i].load(READ_ORD)
    }

    /// Snapshots all `WIDTH` key slots.
    ///
    /// The individual loads are `Relaxed`; the single `Acquire` fence after
    /// them upgrades the whole batch, which is cheaper than `WIDTH` acquire
    /// loads. Note the slots are read one by one, so the array is not a
    /// single atomic snapshot — callers presumably revalidate via the node
    /// version afterwards (TODO confirm against call sites).
    #[must_use]
    #[inline(always)]
    #[expect(clippy::indexing_slicing, reason = "i < WIDTH guaranteed by from_fn")]
    pub fn load_all_ikeys(&self) -> [u64; WIDTH] {
        let ikeys: [u64; WIDTH] = StdArray::from_fn(|i| self.ikey0[i].load(RELAXED));

        fence(AtomicOrdering::Acquire);

        ikeys
    }

    /// Stores `ikey` into slot `i` (`i < WIDTH`) with `WRITE_ORD`.
    #[inline(always)]
    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
    pub fn set_ikey(&self, i: usize, ikey: u64) {
        debug_assert!(i < WIDTH, "set_ikey: index {i} out of bounds");
        self.ikey0[i].store(ikey, WRITE_ORD);
    }

    /// Relaxed-ordering variant of [`Self::set_ikey`] for writers whose
    /// publication is ordered by a later release store; bounds are the
    /// caller's responsibility (no assert here).
    #[inline(always)]
    #[expect(clippy::indexing_slicing, reason = "bounds checked by caller")]
    pub(super) fn set_ikey_relaxed(&self, i: usize, ikey: u64) {
        self.ikey0[i].store(ikey, RELAXED);
    }

    /// Height of this node in the B+-tree layer (0 = children are leaves).
    ///
    /// Widened to `u32` from the stored field's narrower type.
    #[must_use]
    #[inline(always)]
    pub const fn height(&self) -> u32 {
        self.height as u32
    }

    /// `true` when this internode's children are leaf nodes rather than
    /// further internodes (height 0).
    #[must_use]
    #[inline(always)]
    pub const fn children_are_leaves(&self) -> bool {
        self.height == 0
    }

    /// Loads child pointer `i` (`i <= WIDTH`: a node with `WIDTH` keys has
    /// `WIDTH + 1` children), protected by the seize `guard` so the pointee
    /// cannot be reclaimed while the guard is live.
    #[must_use]
    #[inline(always)]
    #[expect(
        clippy::indexing_slicing,
        reason = "bounds checked via debug_assert; i <= WIDTH (16 children)"
    )]
    pub fn child(&self, i: usize, guard: &impl Guard) -> *mut u8 {
        debug_assert!(i <= WIDTH, "child: index {i} out of bounds");
        guard.protect(&self.child[i], READ_ORD)
    }

    /// Loads child pointer `i` WITHOUT guard protection.
    ///
    /// # Safety
    ///
    /// The caller must guarantee the returned pointer cannot be reclaimed
    /// while in use — presumably because the node is locked or reclamation
    /// is otherwise quiesced (NOTE(review): confirm against call sites; the
    /// contract is not visible in this file).
    #[must_use]
    #[inline(always)]
    #[expect(
        clippy::indexing_slicing,
        reason = "bounds checked via debug_assert; i <= WIDTH (16 children)"
    )]
    pub unsafe fn child_unguarded(&self, i: usize) -> *mut u8 {
        debug_assert!(i <= WIDTH, "child_unguarded: index {i} out of bounds");
        self.child[i].load(READ_ORD)
    }

    /// Loads child `i` under the guard, and additionally issues prefetches
    /// for the *next* sibling child (`i + 1`) when one exists (`i < nkeys`
    /// implies `i + 1 <= WIDTH`, so the index is in range). The sibling
    /// pointer is loaded `Relaxed` — it is only used as a prefetch hint,
    /// never dereferenced here.
    #[must_use]
    #[inline]
    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
    pub fn child_with_prefetch(&self, i: usize, nkeys: usize, guard: &impl Guard) -> *mut u8 {
        debug_assert!(i <= WIDTH, "child_with_prefetch: index out of bounds");

        let ptr = guard.protect(&self.child[i], READ_ORD);

        if i < nkeys {
            let next_child_ptr = self.child[i + 1].load(RELAXED);

            // Two 64-byte-spaced hints — presumably covering the hot head
            // of the sibling node across cache lines; wrapping_add avoids
            // UB on a null/dangling hint pointer.
            prefetch_read(next_child_ptr);
            prefetch_read(next_child_ptr.wrapping_add(64));
        }

        ptr
    }

    /// Loads child `i` under the guard and prefetches the first two cache
    /// lines of the child itself (descent path: we are about to read it).
    #[must_use]
    #[inline]
    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
    pub fn child_with_depth_prefetch(&self, i: usize, guard: &impl Guard) -> *mut u8 {
        debug_assert!(i <= WIDTH, "child_with_depth_prefetch: index out of bounds");

        let ptr: *mut u8 = guard.protect(&self.child[i], READ_ORD);

        Self::prefetch_child_internal(ptr);

        ptr
    }

    /// Like [`Self::child_with_depth_prefetch`] but prefetches three cache
    /// lines of the child — for callers that will touch more of it.
    #[must_use]
    #[inline]
    #[expect(clippy::indexing_slicing, reason = "bounds checked via debug_assert")]
    pub fn child_with_full_prefetch(&self, i: usize, guard: &impl Guard) -> *mut u8 {
        debug_assert!(i <= WIDTH, "child_with_full_prefetch: index out of bounds");

        let ptr: *mut u8 = guard.protect(&self.child[i], READ_ORD);

        Self::prefetch_child_full(ptr);

        ptr
    }

    /// Prefetch hint for the first two 64-byte lines at `ptr`; never
    /// dereferences, so a null pointer is harmless.
    #[inline(always)]
    pub(super) fn prefetch_child_internal(ptr: *mut u8) {
        prefetch_read(ptr);
        prefetch_read(ptr.wrapping_add(64));
    }

    /// Prefetch hint for the first three 64-byte lines at `ptr`.
    #[inline(always)]
    pub(super) fn prefetch_child_full(ptr: *mut u8) {
        prefetch_read(ptr);
        prefetch_read(ptr.wrapping_add(64));
        prefetch_read(ptr.wrapping_add(128));
    }

    /// Stores `child` into pointer slot `i` (`i <= WIDTH`) with `WRITE_ORD`.
    #[inline(always)]
    #[expect(
        clippy::indexing_slicing,
        reason = "bounds checked via debug_assert; i <= WIDTH (16 children)"
    )]
    pub fn set_child(&self, i: usize, child: *mut u8) {
        debug_assert!(i <= WIDTH, "set_child: index {i} out of bounds");
        self.child[i].store(child, WRITE_ORD);
    }

    /// Writes separator key `ikey` at position `p` and `right_child` as the
    /// child to its right (slot `p + 1`) — the standard internode insert
    /// pairing. `p < WIDTH` keeps both stores in range.
    #[inline(always)]
    pub fn assign(&self, p: usize, ikey: u64, right_child: *mut u8) {
        debug_assert!(p < WIDTH, "assign: position {p} out of bounds");

        self.set_ikey(p, ikey);
        self.set_child(p + 1, right_child);
    }

    /// Publishes a new key count (`n <= WIDTH`) with `WRITE_ORD`.
    #[inline(always)]
    pub fn set_nkeys(&self, n: u8) {
        debug_assert!((n as usize) <= WIDTH, "set_nkeys: count {n} out of bounds");
        self.nkeys.store(n, WRITE_ORD);
    }

    /// Increments the key count by one.
    ///
    /// This is a relaxed load followed by a separate store — NOT an atomic
    /// read-modify-write — so it assumes a single writer (presumably the
    /// node lock is held; confirm against call sites). `wrapping_add` only
    /// matters in release builds where the overflow debug_assert is gone.
    #[inline(always)]
    pub fn inc_nkeys(&self) {
        let current: u8 = self.nkeys.load(RELAXED);
        debug_assert!((current as usize) < WIDTH, "inc_nkeys: would overflow");
        self.nkeys.store(current.wrapping_add(1), WRITE_ORD);
    }
}