1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
//! =============================================================================
//! Generic Layer Creation
//! =============================================================================
use std::ptr as StdPtr;
// Note: InsertError no longer used here - allocations are infallible
use crate::leaf_trait::TreeLeafNode;
use crate::leaf15::LeafNode15;
use super::{
Key, LAYER_KEYLENX, LeafPolicy, LocalGuard, MassTreeGeneric, Ordering, TreeAllocator,
TreePermutation,
};
impl<P, A> MassTreeGeneric<P, A>
where
    P: LeafPolicy,
    A: TreeAllocator<P>,
{
    /// Assign two entries to the final layer leaf in sorted order.
    ///
    /// The entries are ordered by:
    /// 1. ikey comparison (lexicographic via u64 big-endian)
    /// 2. If ikeys equal: shorter key first (prefix before extension)
    ///
    /// `cmp` is the ordering of `existing_key` relative to `new_key`,
    /// precomputed by the caller; this function only dispatches on it.
    ///
    /// # Safety
    ///
    /// - `final_ptr` must be valid and point to an empty leaf
    /// - `guard` must come from this tree's collector
    /// - Caller must ensure no concurrent access to `final_ptr`
    #[expect(clippy::too_many_arguments, reason = "Internal helper")]
    #[expect(clippy::unused_self, reason = "API Consistency")]
    unsafe fn assign_final_layer_entries(
        &self,
        final_ptr: *mut LeafNode15<P>,
        existing_key: &Key<'_>,
        existing_output: Option<P::Output>,
        new_key: &Key<'_>,
        new_arc: Option<P::Output>,
        cmp: Ordering,
        guard: &LocalGuard<'_>,
    ) {
        // SAFETY: final_ptr is valid per caller contract
        let final_leaf: &LeafNode15<P> = unsafe { &*final_ptr };
        match cmp {
            // existing < new: existing goes in slot 0, new in slot 1.
            Ordering::Less => {
                // SAFETY: guard requirement passed through from caller
                unsafe {
                    final_leaf.assign_from_key(0, existing_key, existing_output, guard);
                    final_leaf.assign_from_key(1, new_key, new_arc, guard);
                }
            }
            // existing > new: new goes in slot 0, existing in slot 1.
            Ordering::Greater => {
                // SAFETY: guard requirement passed through from caller
                unsafe {
                    final_leaf.assign_from_key(0, new_key, new_arc, guard);
                    final_leaf.assign_from_key(1, existing_key, existing_output, guard);
                }
            }
            // ikeys are equal: break the tie by length — the shorter key
            // (the prefix) sorts before the longer one (its extension).
            Ordering::Equal => {
                if existing_key.current_len() <= new_key.current_len() {
                    // SAFETY: guard requirement passed through from caller
                    unsafe {
                        final_leaf.assign_from_key(0, existing_key, existing_output, guard);
                        final_leaf.assign_from_key(1, new_key, new_arc, guard);
                    }
                } else {
                    // SAFETY: guard requirement passed through from caller
                    unsafe {
                        final_leaf.assign_from_key(0, new_key, new_arc, guard);
                        final_leaf.assign_from_key(1, existing_key, existing_output, guard);
                    }
                }
            }
        }
        // Publish both slots by installing a sorted 2-entry permutation.
        // Relaxed is sufficient: this leaf is not yet visible to other threads.
        // The store_layer Release in the caller publishes it.
        final_leaf.set_permutation_relaxed(
            <<LeafNode15<P> as TreeLeafNode<P>>::Perm as TreePermutation>::make_sorted(2),
        );
    }

    /// Create a new layer for suffix conflict (generic version).
    ///
    /// Called when an insert collides with an existing entry whose stored
    /// suffix shares the same ikey in `parent_leaf`. Builds a chain of
    /// single-entry "twig" layer leaves for every 8-byte segment on which
    /// the two keys still agree, then a final leaf holding both entries
    /// in sorted order (via [`Self::assign_final_layer_entries`]).
    ///
    /// Returns a type-erased pointer to the root leaf of the new layer
    /// chain (the first twig if any were needed, otherwise the final
    /// leaf). The caller is expected to install it into `parent_leaf`'s
    /// conflict slot; the Release in that `store_layer` is what publishes
    /// all the Relaxed stores performed here.
    ///
    /// # Safety
    ///
    /// - Caller must hold the lock on `parent_leaf`
    /// - Caller must have called `lock.mark_insert()` before calling this
    /// - `guard` must come from this tree's collector
    #[cold]
    #[inline(never)]
    pub(super) unsafe fn create_layer_concurrent_generic(
        &self,
        parent_leaf: &LeafNode15<P>,
        conflict_slot: usize,
        new_key: &mut Key<'_>,
        new_value: P::Output,
        guard: &LocalGuard<'_>,
    ) -> *mut u8 {
        // STEP: 1 - Extract existing key's suffix and value
        debug_assert!(
            parent_leaf.ksuf(conflict_slot).is_some(),
            "conflict slot {conflict_slot} should have a suffix \
             (keylenx={}, ikey=0x{:016x})",
            parent_leaf.keylenx(conflict_slot),
            parent_leaf.ikey(conflict_slot),
        );
        // Defensive in release builds: an empty suffix yields an empty
        // existing_key rather than UB/panic (the debug_assert above fires
        // in debug builds if this ever happens).
        let existing_suffix: &[u8] = parent_leaf.ksuf(conflict_slot).unwrap_or(&[]);
        let mut existing_key: Key<'_> = Key::from_suffix(existing_suffix);
        let existing_output: Option<P::Output> =
            parent_leaf.try_clone_output_relaxed(conflict_slot);
        debug_assert!(
            existing_output.is_some(),
            "try_create_layer_concurrent_generic: conflict slot should contain a value"
        );
        // Advance new_key past the segment already consumed by parent_leaf,
        // so both keys are positioned at the start of the new layer.
        if new_key.has_suffix() {
            new_key.shift();
        }
        let mut cmp: Ordering = existing_key.compare(new_key.ikey(), new_key.current_len());
        // STEP: 2 - Build a chain of single-entry twig layers while the two
        // keys keep sharing full 8-byte segments. `twig_head` is the chain
        // root (returned to the caller); `twig_tail` is the deepest twig,
        // whose slot 0 is re-pointed at each newly created level.
        let mut twig_head: Option<*mut LeafNode15<P>> = None;
        let mut twig_tail: *mut LeafNode15<P> = StdPtr::null_mut();
        while (cmp == Ordering::Equal) && existing_key.has_suffix() && new_key.has_suffix() {
            let twig_ptr: *mut LeafNode15<P> = self.allocator.alloc_leaf_direct(false, true);
            // SAFETY: twig_ptr is valid, we just allocated it
            unsafe {
                (*twig_ptr).set_ikey(0, existing_key.ikey());
                // Relaxed: twig is not yet visible. store_layer Release publishes it.
                (*twig_ptr).set_permutation_relaxed(
                    <<LeafNode15<P> as TreeLeafNode<P>>::Perm as TreePermutation>::make_sorted(1),
                );
            }
            if twig_head.is_some() {
                // Link the previous twig's single entry down to this one.
                // SAFETY: twig_tail is valid from previous iteration
                unsafe {
                    (*twig_tail).set_keylenx(0, LAYER_KEYLENX);
                    (*twig_tail).store_layer(0, twig_ptr.cast::<u8>());
                }
            } else {
                twig_head = Some(twig_ptr);
            }
            twig_tail = twig_ptr;
            // Shift both keys
            existing_key.shift();
            new_key.shift();
            cmp = existing_key.compare(new_key.ikey(), new_key.current_len());
        }
        // STEP: 3 - Allocate the final leaf and place both entries in it,
        // ordered according to the last comparison.
        let final_ptr: *mut LeafNode15<P> = self.allocator.alloc_leaf_direct(false, true);
        // SAFETY: final_ptr is valid, guard is from caller
        unsafe {
            self.assign_final_layer_entries(
                final_ptr,
                &existing_key,
                existing_output,
                new_key,
                Some(new_value),
                cmp,
                guard,
            );
        }
        // STEP: 4 - Hook the final leaf under the deepest twig (if any)
        // and return the chain root as a type-erased pointer.
        let result_ptr: *mut u8 = match twig_head {
            Some(head) => {
                // SAFETY: twig_tail was set in the loop whenever twig_head is Some.
                unsafe {
                    (*twig_tail).set_keylenx(0, LAYER_KEYLENX);
                    (*twig_tail).store_layer(0, final_ptr.cast::<u8>());
                }
                head.cast::<u8>()
            }
            None => final_ptr.cast::<u8>(),
        };
        result_ptr
    }
}