use super::*;
use std::alloc::alloc;
use std::slice::SliceIndex;
use std::{
    alloc::Layout,
    mem::{self, MaybeUninit},
};

const N: usize = super::consts::INNER_N;
const C: usize = super::consts::INNER_C;

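/// An inner (non-leaf) node of the tree.
///
/// `slot_key` holds up to `N` keys, `child_id` up to `C` child node ids, and
/// `arguments` one `A` value per child. All three arrays are `MaybeUninit`
/// storage: only the first `size` keys and the first `size + 1`
/// children/arguments are initialized, so every accessor has to go through
/// `size`.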
#[derive(Debug)]
pub struct InnerNode<K: Key, A: Argument<K>> {
    size: u16,

    slot_key: [MaybeUninit<K>; N],
    child_id: [MaybeUninit<NodeId>; C],
    arguments: [MaybeUninit<A>; C],
}

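// Only the initialized prefix is dropped: `len()` keys and `len() + 1`
// arguments. `NodeId` children are assumed to be plain ids with no `Drop`
// of their own.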
impl<K: Key, A: Argument<K>> Drop for InnerNode<K, A> {
    fn drop(&mut self) {
        unsafe {
            for k in self.key_area_mut(..self.len()) {
                k.assume_init_drop();
            }

            for m in self.arguments_area_mut(..self.len() + 1) {
                m.assume_init_drop();
            }
        }
    }
}

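// Cloning copies just the initialized prefix of the key and argument arrays.
// The child id array is cloned wholesale, which relies on `NodeId` being
// `Copy` (otherwise `[MaybeUninit<NodeId>; C]` would not be `Clone`).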
impl<K: Key, A: Argument<K>> Clone for InnerNode<K, A> {
    fn clone(&self) -> Self {
        let mut new_key = unsafe { MaybeUninit::<[MaybeUninit<K>; N]>::uninit().assume_init() };

        for i in 0..self.len() {
            unsafe {
                *new_key.get_unchecked_mut(i) =
                    MaybeUninit::new(self.key_area(i).assume_init_ref().clone());
            };
        }

        let mut new_arguments =
            unsafe { MaybeUninit::<[MaybeUninit<A>; C]>::uninit().assume_init() };

        for i in 0..self.len() + 1 {
            unsafe {
                *new_arguments.get_unchecked_mut(i) =
                    MaybeUninit::new(self.argument_area(i).assume_init_ref().clone());
            };
        }

        Self {
            size: self.size,
            slot_key: new_key,
            child_id: self.child_id.clone(),
            arguments: new_arguments,
        }
    }
}

impl<K: Key, A: Argument<K>> InnerNode<K, A> {
    pub fn len(&self) -> usize {
        self.size as usize
    }

    pub const fn max_capacity() -> u16 {
        N as u16
    }

    pub const fn split_origin_size() -> usize {
        N / 2
    }

    pub const fn split_new_size() -> usize {
        N - Self::split_origin_size()
    }

    const fn minimum_size() -> usize {
        super::consts::MIN_N
    }

    pub fn able_to_lend(&self) -> bool {
        self.size > Self::minimum_size() as u16
    }

    pub fn is_full(&self) -> bool {
        self.size == N as u16
    }

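    /// Allocate a node directly on the heap with every slot uninitialized and
    /// `size` set to 0, avoiding a large temporary on the stack.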
    pub(crate) fn empty() -> Box<Self> {
        let layout = Layout::new::<mem::MaybeUninit<Self>>();
        let ptr: *mut Self = unsafe { alloc(layout).cast() };
        // `Box::from_raw` on a null pointer is undefined behavior, so route
        // allocation failure through the global allocation error handler.
        if ptr.is_null() {
            std::alloc::handle_alloc_error(layout);
        }
        let mut this = unsafe { Box::from_raw(ptr) };
        this.size = 0;
        this
    }

    pub(crate) fn new<I: Into<NodeId> + Copy + Clone, const N1: usize, const C1: usize>(
        slot_keys: [K; N1],
        child_id: [I; C1],
        arguments: [A; C1],
    ) -> Box<Self> {
        Self::new_from_iter(slot_keys, child_id.map(|c| c.into()), arguments)
    }

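    /// Build a node from key, child id and argument iterators.
    ///
    /// The child iterator must yield exactly one more item than the key
    /// iterator (asserted below); the argument iterator is assumed to match
    /// the child count.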
    pub fn new_from_iter(
        keys: impl IntoIterator<Item = K>,
        childs: impl IntoIterator<Item = NodeId>,
        arguments: impl IntoIterator<Item = A>,
    ) -> Box<Self> {
        let mut node = Self::empty();

        let keys = keys.into_iter();
        let childs = childs.into_iter();
        let arguments = arguments.into_iter();

        let mut key_size = 0;
        for (idx, k) in keys.enumerate() {
            node.slot_key[idx] = MaybeUninit::new(k);
            key_size += 1;
        }

        let mut child_size = 0;
        for (idx, c) in childs.enumerate() {
            node.child_id[idx] = MaybeUninit::new(c);
            child_size += 1;
        }

        for (idx, m) in arguments.enumerate() {
            node.arguments[idx] = MaybeUninit::new(m);
        }

        assert!(key_size + 1 == child_size);
        node.size = key_size;

        node
    }

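    /// The initialized keys, viewed as a plain `&[K]`.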
    pub(crate) fn keys(&self) -> &[K] {
        unsafe {
            let slice: &[MaybeUninit<K>] =
                self.slot_key.get_unchecked(..usize::from(self.size));
            &*(slice as *const [MaybeUninit<K>] as *const [K])
        }
    }

    pub(crate) fn arguments(&self) -> &[A] {
        unsafe {
            let slice: &[MaybeUninit<A>] =
                self.arguments.get_unchecked(..usize::from(self.size + 1));
            &*(slice as *const [MaybeUninit<A>] as *const [A])
        }
    }

    pub(crate) fn set_argument(&mut self, idx: usize, a: A) {
        unsafe {
            std::mem::replace(self.arguments_area_mut(idx), MaybeUninit::new(a)).assume_init_drop()
        }
    }

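    /// Find the child a lookup for `k` should descend into.
    ///
    /// Returns the child index and its `NodeId`. On an exact key match the
    /// search goes to the right of that key (`idx + 1`); otherwise it uses
    /// the insertion point reported by the binary search.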
    #[inline]
    pub fn locate_child<Q: ?Sized>(&self, k: &Q) -> (usize, NodeId)
    where
        K: std::borrow::Borrow<Q>,
        Q: Ord,
    {
        match self.keys().binary_search_by_key(&k, |f| f.borrow()) {
            Err(idx) => (idx, self.child_id(idx)),
            Ok(idx) => (idx + 1, self.child_id(idx + 1)),
        }
    }

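    /// Insert `key` at `slot`; `right_child` and its argument become the
    /// child immediately to the right of the new key. The node must not be
    /// full.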
    pub(crate) fn insert_at(
        &mut self,
        slot: usize,
        key: K,
        right_child: NodeId,
        right_child_argument: A,
    ) {
        debug_assert!(slot <= self.len());
        debug_assert!(!self.is_full());

        let new_size = self.size as usize + 1;
        let new_child_size = self.size as usize + 2;

        unsafe {
            slice_utils::slice_insert(self.key_area_mut(..new_size), slot, key);
            slice_utils::slice_insert(self.child_area_mut(..new_child_size), slot + 1, right_child);
            slice_utils::slice_insert(
                self.arguments_area_mut(..new_child_size),
                slot + 1,
                right_child_argument,
            );
        }

        self.size += 1;
    }

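    /// Split a full node while inserting `k` and its right child.
    ///
    /// The lower half of the keys stays in `self`, the upper half moves into
    /// a newly allocated node, and one key is promoted to the caller. Three
    /// cases are handled, depending on whether the insertion point falls in
    /// the lower half, the upper half, or exactly at the split point (in
    /// which case `k` itself is promoted).
    ///
    /// Returns the promoted key and the new right sibling.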
    pub(crate) fn split(
        &mut self,
        child_idx: usize,
        k: K,
        new_child_id: NodeId,
        new_child_argument: A,
    ) -> (K, Box<Self>) {
        debug_assert!(self.is_full());

        let mut new_node = Self::empty();
        new_node.size = Self::split_new_size() as u16;

        let new_key: K;

        let split_origin_size = Self::split_origin_size();
        let split_new_size = Self::split_new_size();

        self.size = split_origin_size as u16;

        if child_idx < split_origin_size {
            new_key = unsafe { self.key_area(split_origin_size - 1).assume_init_read() };

            unsafe {
                slice_utils::move_to_slice(
                    self.key_area_mut(split_origin_size..N),
                    new_node.key_area_mut(..split_new_size),
                );
                slice_utils::move_to_slice(
                    self.child_area_mut(split_origin_size..N + 1),
                    new_node.child_area_mut(..split_new_size + 1),
                );
                slice_utils::move_to_slice(
                    self.arguments_area_mut(split_origin_size..N + 1),
                    new_node.arguments_area_mut(..split_new_size + 1),
                );

                slice_utils::slice_insert(
                    self.key_area_mut(..self.size as usize + 1),
                    child_idx,
                    k,
                );
                slice_utils::slice_insert(
                    self.child_area_mut(..self.size as usize + 2),
                    child_idx + 1,
                    new_child_id,
                );
                slice_utils::slice_insert(
                    self.arguments_area_mut(..self.size as usize + 2),
                    child_idx + 1,
                    new_child_argument,
                );
            };
        } else if child_idx > split_origin_size {
            let prompt_key_index = split_origin_size;
            new_key = unsafe { self.key_area(prompt_key_index).assume_init_read() };

            let new_slot_idx = child_idx - 1 - split_origin_size;
            let new_child_idx = child_idx - split_origin_size;

            unsafe {
                slice_utils::move_to_slice(
                    self.key_area_mut(prompt_key_index + 1..prompt_key_index + new_slot_idx + 1),
                    new_node.key_area_mut(..new_slot_idx),
                );
                slice_utils::move_to_slice(
                    self.child_area_mut(
                        split_origin_size + 1..split_origin_size + 1 + new_child_idx,
                    ),
                    new_node.child_area_mut(0..new_child_idx),
                );
                slice_utils::move_to_slice(
                    self.arguments_area_mut(
                        split_origin_size + 1..split_origin_size + 1 + new_child_idx,
                    ),
                    new_node.arguments_area_mut(0..new_child_idx),
                );

                *new_node.key_area_mut(new_slot_idx) = MaybeUninit::new(k);
                *new_node.child_area_mut(new_child_idx) = MaybeUninit::new(new_child_id);
                *new_node.arguments_area_mut(new_child_idx) = MaybeUninit::new(new_child_argument);

                slice_utils::move_to_slice(
                    self.key_area_mut(prompt_key_index + new_slot_idx + 1..N),
                    new_node.key_area_mut(new_slot_idx + 1..split_new_size),
                );
                slice_utils::move_to_slice(
                    self.child_area_mut(split_origin_size + 1 + new_child_idx..N + 1),
                    new_node.child_area_mut(new_child_idx + 1..split_new_size + 1),
                );
                slice_utils::move_to_slice(
                    self.arguments_area_mut(split_origin_size + 1 + new_child_idx..N + 1),
                    new_node.arguments_area_mut(new_child_idx + 1..split_new_size + 1),
                );
            };
        } else {
            new_key = k;

            unsafe {
                slice_utils::move_to_slice(
                    self.key_area_mut(split_origin_size..N),
                    new_node.key_area_mut(..split_new_size),
                );
                slice_utils::move_to_slice(
                    self.child_area_mut(split_origin_size + 1..N + 1),
                    new_node.child_area_mut(1..split_new_size + 1),
                );
                slice_utils::move_to_slice(
                    self.arguments_area_mut(split_origin_size + 1..N + 1),
                    new_node.arguments_area_mut(1..split_new_size + 1),
                );

                *new_node.child_area_mut(0) = MaybeUninit::new(new_child_id);
                *new_node.arguments_area_mut(0) = MaybeUninit::new(new_child_argument);
            };
        }

        (new_key, new_node)
    }

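    /// Remove the key at `slot` together with the child (and its argument) to
    /// the right of that key, reporting whether the node is still big enough
    /// after the removal.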
    pub(crate) fn remove_slot_with_right(&mut self, slot: usize) -> (InnerMergeResult, K) {
        let k = unsafe {
            let k = slice_utils::slice_remove(self.key_area_mut(..self.size as usize), slot);
            slice_utils::slice_remove(self.child_area_mut(..self.size as usize + 1), slot + 1);
            slice_utils::slice_remove(self.arguments_area_mut(..self.size as usize + 1), slot + 1);
            k
        };
        self.size -= 1;

        if self.size >= Self::minimum_size() as u16 {
            (InnerMergeResult::Done, k)
        } else {
            (InnerMergeResult::UnderSize, k)
        }
    }

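    /// Merge `right` into `self`, using `slot_key` (the separator taken from
    /// the parent) between the two key ranges. `right` is left empty.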
    pub(crate) fn merge_next(&mut self, slot_key: K, right: &mut Self) {
        unsafe {
            *self.key_area_mut(self.size as usize) = MaybeUninit::new(slot_key);
            self.size += 1;

            let self_size = self.size as usize;
            let right_size = right.len();

            debug_assert!(self.len() + right_size <= N);

            slice_utils::move_to_slice(
                right.key_area_mut(..right_size),
                self.key_area_mut(self_size..self_size + right_size),
            );
            slice_utils::move_to_slice(
                right.child_area_mut(..right_size + 1),
                self.child_area_mut(self_size..self_size + right_size + 1),
            );
            slice_utils::move_to_slice(
                right.arguments_area_mut(..right_size + 1),
                self.arguments_area_mut(self_size..self_size + right_size + 1),
            );
            self.size += right.size;
            right.size = 0;
        }
    }

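    /// Remove and return the last key along with the right-most child and its
    /// argument.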
    pub(crate) fn pop(&mut self) -> (K, NodeId, A) {
        let k = std::mem::replace(
            unsafe { self.key_area_mut(self.size as usize - 1) },
            MaybeUninit::uninit(),
        );
        let child = std::mem::replace(
            unsafe { self.child_area_mut(self.size as usize) },
            MaybeUninit::uninit(),
        );
        let argument = std::mem::replace(
            unsafe { self.arguments_area_mut(self.size as usize) },
            MaybeUninit::uninit(),
        );
        self.size -= 1;
        unsafe {
            (
                k.assume_init_read(),
                child.assume_init_read(),
                argument.assume_init_read(),
            )
        }
    }

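    /// Remove and return the first key along with the left-most child and its
    /// argument.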
    pub(crate) fn pop_front(&mut self) -> (K, NodeId, A) {
        let (k, left_c, left_m) = unsafe {
            let k = slice_utils::slice_remove(self.key_area_mut(..self.size as usize), 0);
            let left_c =
                slice_utils::slice_remove(self.child_area_mut(..self.size as usize + 1), 0);
            let left_m =
                slice_utils::slice_remove(self.arguments_area_mut(..self.size as usize + 1), 0);
            (k, left_c, left_m)
        };
        self.size -= 1;

        (k, left_c, left_m)
    }

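    /// Append a key together with a new right-most child and its argument.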
    pub fn push(&mut self, k: K, child: NodeId, argument: A) {
        unsafe {
            *self.key_area_mut(self.size as usize) = MaybeUninit::new(k);
            *self.child_area_mut(self.size as usize + 1) = MaybeUninit::new(child);
            *self.arguments_area_mut(self.size as usize + 1) = MaybeUninit::new(argument);
        };
        self.size += 1;
    }

    pub(crate) fn push_front(&mut self, k: K, child: NodeId, argument: A) {
        unsafe {
            slice_utils::slice_insert(self.key_area_mut(0..self.size as usize + 1), 0, k);
            slice_utils::slice_insert(self.child_area_mut(0..self.size as usize + 2), 0, child);
            slice_utils::slice_insert(
                self.arguments_area_mut(0..self.size as usize + 2),
                0,
                argument,
            );
        }
        self.size += 1;
    }

    #[cfg(test)]
    fn iter_key(&self) -> impl Iterator<Item = &K> {
        unsafe { self.key_area(..self.size as usize) }
            .iter()
            .map(|s| unsafe { s.assume_init_ref() })
    }

    #[cfg(test)]
    fn iter_child(&self) -> impl Iterator<Item = NodeId> + '_ {
        let slice = if self.size > 0 {
            &self.child_id[0..self.len() + 1]
        } else {
            &self.child_id[..0]
        };

        slice.iter().map(|s| unsafe { s.assume_init_read() })
    }

    #[cfg(test)]
    fn iter_argument(&self) -> impl Iterator<Item = A> + '_ {
        let slice = if self.size > 0 {
            &self.arguments[0..self.len() + 1]
        } else {
            &self.arguments[..0]
        };

        slice.iter().map(|s| unsafe { s.assume_init_read() })
    }

    #[cfg(test)]
    pub(crate) fn key_vec(&self) -> Vec<K> {
        self.iter_key().cloned().collect()
    }

    #[cfg(test)]
    pub(crate) fn child_id_vec(&self) -> Vec<NodeId> {
        self.iter_child().collect()
    }

    #[cfg(test)]
    pub(crate) fn argument_vec(&self) -> Vec<A> {
        self.iter_argument().collect()
    }

    pub fn key(&self, idx: usize) -> &K {
        unsafe { self.key_area(idx).assume_init_ref() }
    }

    pub(crate) fn set_key(&mut self, idx: usize, key: K) -> K {
        unsafe {
            std::mem::replace(self.key_area_mut(idx), MaybeUninit::new(key)).assume_init_read()
        }
    }

    pub(crate) fn child_id(&self, idx: usize) -> NodeId {
        unsafe { self.child_area(idx).assume_init_read() }
    }

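    // Unchecked views into the backing arrays. Callers must keep the index or
    // range within the array bounds; whether the slots are actually
    // initialized is a separate obligation tracked through `size`.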
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        unsafe { self.slot_key.as_mut_slice().get_unchecked_mut(index) }
    }

    unsafe fn key_area<I, Output: ?Sized>(&self, index: I) -> &Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        unsafe { self.slot_key.as_slice().get_unchecked(index) }
    }

    unsafe fn argument_area<I, Output: ?Sized>(&self, index: I) -> &Output
    where
        I: SliceIndex<[MaybeUninit<A>], Output = Output>,
    {
        unsafe { self.arguments.as_slice().get_unchecked(index) }
    }

    unsafe fn arguments_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<A>], Output = Output>,
    {
        unsafe { self.arguments.as_mut_slice().get_unchecked_mut(index) }
    }

    unsafe fn child_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<NodeId>], Output = Output>,
    {
        unsafe { self.child_id.as_mut_slice().get_unchecked_mut(index) }
    }

    unsafe fn child_area<I, Output: ?Sized>(&self, index: I) -> &Output
    where
        I: SliceIndex<[MaybeUninit<NodeId>], Output = Output>,
    {
        unsafe { self.child_id.as_slice().get_unchecked(index) }
    }

    #[cfg(test)]
    pub(crate) fn set_data<I: Into<NodeId> + Copy + Clone, const N1: usize, const C1: usize>(
        &mut self,
        slot_keys: [K; N1],
        child_id: [I; C1],
    ) {
        assert!(N1 + 1 == C1);
        assert!(N1 <= N);
        self.size = N1 as u16;
        for i in 0..N1 {
            self.slot_key[i] = MaybeUninit::new(slot_keys[i].clone());
        }

        for c in 0..C1 {
            self.child_id[c] = MaybeUninit::new(child_id[c].into());
        }
    }
}

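/// Result of removing a slot from an inner node: either the node is still
/// sufficiently filled, or it dropped below the minimum size and needs
/// rebalancing by the caller.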
pub enum InnerMergeResult {
    Done,
    UnderSize,
}