1use core::borrow::Borrow;
2use core::cmp::Ordering;
3use core::fmt::{self, Debug};
4use core::hash::{Hash, Hasher};
5use core::iter::FromIterator;
6use core::mem;
7use core::ops::{
8 Bound::{Excluded, Included},
9 Index, RangeBounds, Sub,
10};
11
12use super::arena::Arena;
13use super::error::SgError;
14use super::iter::{IntoIter, Iter, IterMut};
15use super::node::{NodeGetHelper, NodeRebuildHelper};
16use super::node_dispatch::SmallNode;
17
18#[allow(unused_imports)] use micromath::F32Ext;
20use smallnum::SmallUnsigned;
21use tinyvec::{array_vec, ArrayVec};
22
/// Arena index type; `u16` bounds the maximum tree capacity at `u16::MAX` nodes.
pub type Idx = u16;

/// Numerator of the default rebalance factor `alpha` (2/3).
const DEFAULT_ALPHA_NUM: f32 = 2.0;
/// Denominator of the default rebalance factor `alpha` (2/3).
const DEFAULT_ALPHA_DENOM: f32 = 3.0;
29
/// An arena-backed, self-balancing binary search tree (scapegoat tree) with a
/// fixed, stack-allocated capacity of `N` nodes.
#[derive(Clone)]
pub struct SgTree<K: Default, V: Default, const N: usize> {
    // Backing storage for all nodes.
    pub(crate) arena: Arena<K, V, Idx, N>,
    // Arena index of the root node; `None` when the tree is empty.
    pub(crate) opt_root_idx: Option<usize>,

    // Cached arena index of the node holding the maximum key.
    pub(crate) max_idx: usize,
    // Cached arena index of the node holding the minimum key.
    pub(crate) min_idx: usize,
    // Current number of live nodes.
    curr_size: usize,

    // Rebalance factor `alpha = alpha_num / alpha_denom`, kept in [0.5, 1.0).
    alpha_num: f32,
    alpha_denom: f32,
    // High-water node count since the last full rebuild (scapegoat bookkeeping).
    max_size: usize,
    // Number of rebalance operations performed (wraps on overflow).
    rebal_cnt: usize,
}
48
49impl<K: Ord + Default, V: Default, const N: usize> SgTree<K, V, N> {
50 pub fn new() -> Self {
54 if N > SgTree::<K, V, N>::max_capacity() {
55 panic!("Max stack item capacity (0x{:x}) exceeded!", Idx::MAX);
56 }
57
58 SgTree {
59 arena: Arena::<K, V, Idx, N>::default(),
60 opt_root_idx: None,
61 max_idx: 0,
62 min_idx: 0,
63 curr_size: 0,
64 alpha_num: DEFAULT_ALPHA_NUM,
65 alpha_denom: DEFAULT_ALPHA_DENOM,
66 max_size: 0,
67 rebal_cnt: 0,
68 }
69 }
70
71 pub fn set_rebal_param(&mut self, alpha_num: f32, alpha_denom: f32) -> Result<(), SgError> {
83 let a = alpha_num / alpha_denom;
84 match (0.5..1.0).contains(&a) {
85 true => {
86 self.alpha_num = alpha_num;
87 self.alpha_denom = alpha_denom;
88 Ok(())
89 }
90 false => Err(SgError::RebalanceFactorOutOfRange),
91 }
92 }
93
    /// Get the current rebalance factor as a `(numerator, denominator)` pair.
    pub fn rebal_param(&self) -> (f32, f32) {
        (self.alpha_num, self.alpha_denom)
    }
99
    /// Total number of nodes the tree can hold (the compile-time bound `N`).
    pub fn capacity(&self) -> usize {
        self.arena.capacity()
    }
104
    /// Size, in bytes, of a single tree node as stored in the arena.
    pub fn node_size(&self) -> usize {
        self.arena.node_size()
    }
109
    /// Move all elements from `other` into `self`, leaving `other` empty.
    /// For the fallible, capacity-checked variant see `try_append` — this
    /// version may panic if the combined element count exceeds capacity
    /// (failure surfaces from the underlying arena; TODO confirm exact mode).
    pub fn append(&mut self, other: &mut SgTree<K, V, N>)
    where
        K: Ord,
    {
        // Nothing to move.
        if other.is_empty() {
            return;
        }

        // Self is empty: a swap is cheaper than element-wise inserts.
        if self.is_empty() {
            mem::swap(self, other);
            return;
        }

        // Drain every occupied arena slot of `other` and re-insert here
        // (vacant slots yield `None` from `remove` and are skipped).
        for arena_idx in 0..other.arena.len() {
            if let Some(mut node) = other.arena.remove(arena_idx) {
                self.insert(node.take_key(), node.take_val());
            }
        }
        other.clear();
    }
134
    /// Fallible `append`: moves all elements from `other` into `self` only if
    /// the merged tree fits in capacity, otherwise returns
    /// `Err(SgError::StackCapacityExceeded)` without moving anything.
    pub fn try_append(&mut self, other: &mut SgTree<K, V, N>) -> Result<(), SgError> {
        // Nothing to move.
        if other.is_empty() {
            return Ok(());
        }

        // Self is empty: a swap is cheaper than element-wise inserts.
        if self.is_empty() {
            mem::swap(self, other);
            return Ok(());
        }

        // Shared keys overwrite in place, so they don't consume extra slots —
        // subtract the intersection before the capacity check.
        if (self.len() + other.len() - self.intersect_cnt(other)) <= self.capacity() {
            for arena_idx in 0..other.arena.len() {
                if let Some(mut node) = other.arena.remove(arena_idx) {
                    self.try_insert(node.take_key(), node.take_val())?;
                }
            }
            other.clear();
        } else {
            return Err(SgError::StackCapacityExceeded);
        }

        Ok(())
    }
164
    /// Insert a key-value pair, returning the value previously associated
    /// with `key` (if any). May trigger a subtree rebuild to restore the
    /// alpha-balance invariant.
    pub fn insert(&mut self, key: K, val: V) -> Option<V>
    where
        K: Ord,
    {
        self.internal_balancing_insert::<Idx>(key, val).0
    }
175
176 pub fn try_insert(&mut self, key: K, val: V) -> Result<Option<V>, SgError>
182 where
183 K: Ord,
184 {
185 match self.contains_key(&key) || (self.capacity() > self.len()) {
187 true => Ok(self.internal_balancing_insert::<Idx>(key, val).0),
188 false => Err(SgError::StackCapacityExceeded),
189 }
190 }
191
192 pub fn try_extend<I: ExactSizeIterator + IntoIterator<Item = (K, V)>>(
194 &mut self,
195 iter: I,
196 ) -> Result<(), SgError> {
197 if iter.len() <= (self.capacity() - self.len()) {
198 iter.into_iter().for_each(move |(k, v)| {
199 assert!(self.try_insert(k, v).is_ok());
200 });
201 Ok(())
202 } else {
203 Err(SgError::StackCapacityExceeded)
204 }
205 }
206
207 pub fn try_from_iter<I: ExactSizeIterator + IntoIterator<Item = (K, V)>>(
210 iter: I,
211 ) -> Result<Self, SgError> {
212 match iter.len() <= SgTree::<K, V, N>::max_capacity() {
213 true => Ok(SgTree::from_iter(iter)),
214 false => Err(SgError::MaximumCapacityExceeded),
215 }
216 }
217
    /// An iterator over `(&key, &value)` pairs in ascending key order.
    pub fn iter(&self) -> Iter<'_, K, V, N> {
        Iter::new(self)
    }
222
    /// An iterator over `(&key, &mut value)` pairs in ascending key order.
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V, N> {
        IterMut::new(self)
    }
227
228 pub fn remove_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
233 where
234 K: Borrow<Q> + Ord,
235 Q: Ord + ?Sized,
236 {
237 match self.priv_remove_by_key(key) {
238 Some((key, val)) => {
239 if self.max_size > (2 * self.curr_size) {
240 if let Some(root_idx) = self.opt_root_idx {
241 self.rebuild::<Idx>(root_idx);
242 self.max_size = self.curr_size;
243 }
244 }
245 Some((key, val))
246 }
247 None => None,
248 }
249 }
250
    /// Remove `key`, returning its value if it was present.
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
    {
        self.remove_entry(key).map(|(_, v)| v)
    }
262
    /// Keep only the entries for which `f` returns `true`; the rest are
    /// removed (implemented as a drain of the predicate's complement).
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&K, &mut V) -> bool,
        K: Ord,
    {
        self.priv_drain_filter(|k, v| !f(k, v));
    }
271
    /// Split the tree at `key`: entries with keys `>= key` move into the
    /// returned tree; entries with smaller keys stay in `self`.
    pub fn split_off<Q>(&mut self, key: &Q) -> Self
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
    {
        self.priv_drain_filter(|k, _| k >= key)
    }
280
281 pub fn get_key_value<Q>(&self, key: &Q) -> Option<(&K, &V)>
286 where
287 K: Borrow<Q> + Ord,
288 Q: Ord + ?Sized,
289 {
290 let ngh: NodeGetHelper<Idx> = self.internal_get(None, key);
291 match ngh.node_idx() {
292 Some(idx) => {
293 let node = &self.arena[idx];
294 Some((node.key(), node.val()))
295 }
296 None => None,
297 }
298 }
299
    /// Look up `key`, returning a reference to its value.
    pub fn get<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
    {
        self.get_key_value(key).map(|(_, v)| v)
    }
311
312 pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
317 where
318 K: Borrow<Q> + Ord,
319 Q: Ord + ?Sized,
320 {
321 let ngh: NodeGetHelper<Idx> = self.internal_get(None, key);
322 match ngh.node_idx() {
323 Some(idx) => {
324 let (_, val) = self.arena[idx].get_mut();
325 Some(val)
326 }
327 None => None,
328 }
329 }
330
331 pub fn clear(&mut self) {
333 if !self.is_empty() {
334 let rebal_cnt = self.rebal_cnt;
335 *self = SgTree::new();
336 self.rebal_cnt = rebal_cnt;
337 }
338 }
339
    /// `true` if the tree contains an entry for `key`.
    pub fn contains_key<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
    {
        self.get(key).is_some()
    }
351
    /// `true` if the tree holds no elements (no root node).
    pub fn is_empty(&self) -> bool {
        self.opt_root_idx.is_none()
    }
356
    /// `true` if the tree has reached its fixed capacity `N`.
    pub fn is_full(&self) -> bool {
        debug_assert!(self.len() <= self.capacity());
        self.len() == self.capacity()
    }
362
363 pub fn first_key_value(&self) -> Option<(&K, &V)>
366 where
367 K: Ord,
368 {
369 if !self.is_empty() {
370 let node = &self.arena[self.min_idx];
371 Some((node.key(), node.val()))
372 } else {
373 None
374 }
375 }
376
    /// The minimum key, if any.
    pub fn first_key(&self) -> Option<&K>
    where
        K: Ord,
    {
        self.first_key_value().map(|(k, _)| k)
    }
384
    /// Remove and return the entry with the minimum key, if any.
    pub fn pop_first(&mut self) -> Option<(K, V)>
    where
        K: Ord,
    {
        self.priv_remove_by_idx(self.min_idx)
    }
393
394 pub fn last_key_value(&self) -> Option<(&K, &V)>
397 where
398 K: Ord,
399 {
400 if !self.is_empty() {
401 let node = &self.arena[self.max_idx];
402 Some((node.key(), node.val()))
403 } else {
404 None
405 }
406 }
407
    /// The maximum key, if any.
    pub fn last_key(&self) -> Option<&K>
    where
        K: Ord,
    {
        self.last_key_value().map(|(k, _)| k)
    }
415
    /// Remove and return the entry with the maximum key, if any.
    pub fn pop_last(&mut self) -> Option<(K, V)>
    where
        K: Ord,
    {
        self.priv_remove_by_idx(self.max_idx)
    }
424
    /// Current number of elements in the tree.
    pub fn len(&self) -> usize {
        self.curr_size
    }
429
    /// Number of rebalance operations performed so far (wrapping counter,
    /// survives `clear`).
    pub fn rebal_cnt(&self) -> usize {
        self.rebal_cnt
    }
435
    /// Remove the node stored at arena slot `idx`, if that slot is occupied.
    /// Re-derives the node's parent linkage via a by-key lookup (no path
    /// tracking needed without `fast_rebalance`).
    #[cfg(not(feature = "fast_rebalance"))]
    pub(crate) fn priv_remove_by_idx(&mut self, idx: usize) -> Option<(K, V)> {
        if self.arena.is_occupied(idx) {
            let node = &self.arena[idx];
            let ngh: NodeGetHelper<Idx> = self.internal_get(None, node.key());
            debug_assert!(
                ngh.node_idx().unwrap() == idx,
                "By-key retrieval index doesn't match arena storage index!"
            );
            self.priv_remove(None, ngh)
        } else {
            None
        }
    }
454
    /// `fast_rebalance` variant of `priv_remove_by_idx`: additionally records
    /// the access path so cached subtree sizes can be decremented on removal.
    #[cfg(feature = "fast_rebalance")]
    pub(crate) fn priv_remove_by_idx(&mut self, idx: usize) -> Option<(K, V)> {
        if self.arena.is_occupied(idx) {
            let node = &self.arena[idx];
            let mut path = Arena::<K, V, Idx, N>::new_idx_vec();
            let ngh = self.internal_get(Some(&mut path), node.key());
            debug_assert!(
                ngh.node_idx().unwrap() == idx,
                "By-key retrieval index doesn't match arena storage index!"
            );
            self.priv_remove(Some(&path), ngh)
        } else {
            None
        }
    }
472
    /// Flatten the subtree rooted at `idx` into a key-sorted list of arena
    /// indexes: iterative traversal collects every node, then a sort orders
    /// them for the rebuild step.
    pub(crate) fn flatten_subtree_to_sorted_idxs<U: SmallUnsigned + Default + Copy>(
        &self,
        idx: usize,
    ) -> ArrayVec<[U; N]> {
        // Worklist drives the traversal; the flattened list starts with the
        // subtree root itself (children are appended as they're discovered).
        let mut subtree_worklist = array_vec![[U; N] => U::checked_from(idx)];
        let mut subtree_flattened = array_vec![[U; N] => U::checked_from(idx)];

        while let Some(idx) = subtree_worklist.pop() {
            let node = &self.arena[idx.usize()];

            if let Some(left_idx) = node.left_idx() {
                let left = U::checked_from(left_idx);
                subtree_worklist.push(left);
                subtree_flattened.push(left);
            }

            if let Some(right_idx) = node.right_idx() {
                let right = U::checked_from(right_idx);
                subtree_worklist.push(right);
                subtree_flattened.push(right);
            }
        }

        // Collection order is traversal order, not key order — sort by key.
        subtree_flattened
            .sort_unstable_by(|a, b| self.arena[a.usize()].key().cmp(self.arena[b.usize()].key()));

        subtree_flattened
    }
504
    /// Physically sort the arena's occupied slots, then repair the root and
    /// min/max caches invalidated by the slot moves.
    pub(crate) fn sort_arena(&mut self) {
        if let Some(root_idx) = self.opt_root_idx {
            // Gather position metadata (node/parent linkage) for every
            // occupied slot via by-key lookups.
            let mut sort_metadata = self
                .arena
                .iter()
                .filter(|n| n.is_some())
                .map(|n| n.as_ref().unwrap())
                .map(|n| self.internal_get(None, n.key()))
                .collect::<ArrayVec<[NodeGetHelper<usize>; N]>>();

            sort_metadata.sort_unstable_by_key(|ngh| self.arena[ngh.node_idx().unwrap()].key());
            let sorted_root_idx = self.arena.sort(root_idx, sort_metadata);

            // Slot indexes changed: refresh all cached positions.
            self.opt_root_idx = Some(sorted_root_idx);
            self.update_max_idx();
            self.update_min_idx();
        }
    }
524
    /// Number of keys present in both `self` and `other`.
    pub(crate) fn intersect_cnt(&self, other: &SgTree<K, V, N>) -> usize {
        self.iter().filter(|(k, _)| other.contains_key(k)).count()
    }
529
    /// Largest capacity any `SgTree` can have, limited by the [`Idx`] type.
    pub(crate) fn max_capacity() -> usize {
        Idx::MAX as usize
    }
534
535 pub(crate) fn range_search<T, R>(&self, range: &R) -> ArrayVec<[usize; N]>
537 where
538 T: Ord + ?Sized,
539 R: RangeBounds<T>,
540 K: Borrow<T> + Ord,
541 {
542 let mut node_idxs = ArrayVec::<[usize; N]>::new();
543
544 for (idx, node) in self
545 .arena
546 .iter()
547 .enumerate()
548 .filter_map(|(i, node)| Some((i, node.as_ref()?)))
549 {
550 if range.contains(node.key().borrow()) {
551 node_idxs.push(idx);
552 }
553 }
554
555 node_idxs.sort_unstable_by_key(|idx| self.arena[*idx].key());
556
557 node_idxs
558 }
559
    /// Validate a user-supplied range the way the standard library's ordered
    /// maps do.
    ///
    /// # Panics
    /// Panics if the range's start is greater than its end, or if both bounds
    /// are excluded and equal.
    pub(crate) fn assert_valid_range<T, R>(range: &R)
    where
        T: Ord + ?Sized,
        R: RangeBounds<T>,
        K: Borrow<T> + Ord,
    {
        match (range.start_bound(), range.end_bound()) {
            // At least one bound is inclusive: only start > end is invalid.
            (Included(start), Included(end))
            | (Included(start), Excluded(end))
            | (Excluded(start), Included(end)) => {
                if start > end {
                    panic!("range start is greater than range end");
                }
            }
            // Both bounds excluded: equal bounds denote an invalid range.
            (Excluded(start), Excluded(end)) => {
                if start == end {
                    panic!("range start and end are equal and excluded");
                }
            }
            // Unbounded on either side: always valid.
            _ => {}
        }
    }
583
    /// Core retrieval: descend from the root comparing `key` at each node.
    /// Returns a `NodeGetHelper` carrying the found node's index, its
    /// parent's index, and which child it is. When `opt_path` is provided,
    /// the ancestor path (excluding the found node) is recorded into it; on a
    /// miss the partial path is cleared.
    pub(crate) fn internal_get<Q, U: SmallUnsigned + Default + Copy>(
        &self,
        mut opt_path: Option<&mut ArrayVec<[U; N]>>,
        key: &Q,
    ) -> NodeGetHelper<U>
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
    {
        match self.opt_root_idx {
            Some(root_idx) => {
                let mut opt_parent_idx = None;
                let mut curr_idx = root_idx;
                let mut is_right_child = false;
                loop {
                    let node = &self.arena[curr_idx];

                    // Record every visited node, including the target itself
                    // (popped back off below on a hit).
                    if let Some(ref mut path) = opt_path {
                        path.push(U::checked_from(curr_idx));
                    }

                    match key.cmp(node.key().borrow()) {
                        Ordering::Less => match node.left_idx() {
                            Some(lt_idx) => {
                                opt_parent_idx = Some(curr_idx);
                                curr_idx = lt_idx;
                                is_right_child = false;
                            }
                            None => {
                                // Miss: discard the partial path.
                                if let Some(path) = opt_path {
                                    path.clear();
                                }

                                return NodeGetHelper::new(None, None, false);
                            }
                        },
                        Ordering::Equal => {
                            // Hit: path holds ancestors only, so remove the
                            // target node pushed above.
                            if let Some(path) = opt_path {
                                path.pop();
                            }

                            return NodeGetHelper::new(
                                Some(curr_idx),
                                opt_parent_idx,
                                is_right_child,
                            );
                        }
                        Ordering::Greater => match node.right_idx() {
                            Some(gt_idx) => {
                                opt_parent_idx = Some(curr_idx);
                                curr_idx = gt_idx;
                                is_right_child = true;
                            }
                            None => {
                                // Miss: discard the partial path.
                                if let Some(path) = opt_path {
                                    path.clear();
                                }

                                return NodeGetHelper::new(None, None, false);
                            }
                        },
                    }
                }
            }
            // Empty tree: nothing to find.
            None => NodeGetHelper::new(None, None, false),
        }
    }
653
    /// Insert `(key, val)`, rebuilding a scapegoat subtree if the insertion
    /// path exceeds the alpha-balance depth bound. Returns the displaced
    /// value for `key` (if any) and the arena index of the inserted node.
    pub(crate) fn internal_balancing_insert<U: Default + Copy + Ord + Sub + SmallUnsigned>(
        &mut self,
        key: K,
        val: V,
    ) -> (Option<V>, usize) {
        let mut path: ArrayVec<[U; N]> = Arena::<K, V, U, N>::new_idx_vec();
        let (opt_val, ngh) = self.priv_insert(&mut path, key, val);

        // `fast_rebalance` caches per-node subtree sizes: bump every ancestor
        // on the insertion path.
        #[cfg(feature = "fast_rebalance")]
        {
            for parent_idx in &path {
                let parent_node = &mut self.arena[(*parent_idx).usize()];
                parent_node.set_subtree_size(parent_node.subtree_size() + 1);
            }
        }

        // An insertion deeper than the alpha bound implies an unbalanced
        // ancestor exists somewhere on the path: rebuild its subtree.
        if path.len() > self.alpha_balance_depth(self.max_size) {
            if let Some(scapegoat_idx) = self.find_scapegoat(&path) {
                self.rebuild::<U>(scapegoat_idx);
            }
        }

        debug_assert!(ngh.node_idx().is_some());
        let new_node_idx = ngh.node_idx().expect("Inserted node index must be `Some`");
        (opt_val, new_node_idx)
    }
686
    /// Insertion workhorse: descend from the root to the insertion point,
    /// recording the traversal path, then either link a brand-new leaf or
    /// replace an existing equal key's value in place. Returns the displaced
    /// value (if any) plus a `NodeGetHelper` describing the node's position.
    fn priv_insert<U: SmallUnsigned + Default + Copy>(
        &mut self,
        path: &mut ArrayVec<[U; N]>,
        key: K,
        val: V,
    ) -> (Option<V>, NodeGetHelper<U>) {
        match self.opt_root_idx {
            // Non-empty tree: walk down to a leaf slot or an equal key.
            Some(idx) => {
                let mut curr_idx = idx;
                let mut opt_val = None;
                let ngh: NodeGetHelper<U>;
                loop {
                    let curr_node = &mut self.arena[curr_idx];
                    path.push(U::checked_from(curr_idx));

                    match key.cmp(curr_node.key()) {
                        Ordering::Less => {
                            match curr_node.left_idx() {
                                Some(left_idx) => curr_idx = left_idx,
                                None => {
                                    // New leaf as left child. Compare against
                                    // the cached min before `key` is moved
                                    // into the arena.
                                    let mut new_min_found = false;
                                    let min_node = &self.arena[self.min_idx];
                                    if &key < min_node.key() {
                                        new_min_found = true;
                                    }

                                    let new_node_idx = self.arena.add(key, val);

                                    // Refresh the cached min index.
                                    if new_min_found {
                                        self.min_idx = new_node_idx;
                                    }

                                    ngh = NodeGetHelper::new(
                                        Some(new_node_idx),
                                        Some(curr_idx),
                                        false,
                                    );
                                    break;
                                }
                            }
                        }
                        Ordering::Equal => {
                            // Equal key already present: overwrite key and
                            // value in place — no structural change.
                            curr_node.set_key(key);

                            opt_val = Some(curr_node.take_val());
                            curr_node.set_val(val);

                            // No parent index: signals "nothing to re-link".
                            ngh = NodeGetHelper::new(Some(curr_idx), None, false);
                            break;
                        }
                        Ordering::Greater => {
                            match curr_node.right_idx() {
                                Some(right_idx) => curr_idx = right_idx,
                                None => {
                                    // New leaf as right child. Compare against
                                    // the cached max before `key` is moved
                                    // into the arena.
                                    let mut new_max_found = false;
                                    let max_node = &self.arena[self.max_idx];
                                    if &key > max_node.key() {
                                        new_max_found = true;
                                    }

                                    let new_node_idx = self.arena.add(key, val);

                                    // Refresh the cached max index.
                                    if new_max_found {
                                        self.max_idx = new_node_idx;
                                    }

                                    ngh = NodeGetHelper::new(
                                        Some(new_node_idx),
                                        Some(curr_idx),
                                        true,
                                    );
                                    break;
                                }
                            }
                        }
                    }
                }

                // A parent index is only set for genuinely new nodes: link
                // the parent's child pointer and bump the size counters.
                if let Some(parent_idx) = ngh.parent_idx() {
                    self.curr_size += 1;
                    self.max_size += 1;

                    let parent_node = &mut self.arena[parent_idx];
                    if ngh.is_right_child() {
                        parent_node.set_right_idx(ngh.node_idx());
                    } else {
                        parent_node.set_left_idx(ngh.node_idx());
                    }
                }

                (opt_val, ngh)
            }

            // Empty tree: the new node becomes root, min, and max at once.
            None => {
                debug_assert_eq!(self.curr_size, 0);
                self.curr_size += 1;
                self.max_size += 1;

                let root_idx = self.arena.add(key, val);
                self.opt_root_idx = Some(root_idx);
                self.max_idx = root_idx;
                self.min_idx = root_idx;

                let ngh = NodeGetHelper::new(Some(root_idx), None, false);
                (None, ngh)
            }
        }
    }
817
    /// Remove by key: look up the node's position, then delete it.
    /// No path tracking needed without `fast_rebalance`.
    #[cfg(not(feature = "fast_rebalance"))]
    fn priv_remove_by_key<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
    {
        let ngh: NodeGetHelper<Idx> = self.internal_get(None, key);
        self.priv_remove(None, ngh)
    }
828
    /// `fast_rebalance` variant of `priv_remove_by_key`: records the access
    /// path so cached subtree sizes can be decremented during removal.
    #[cfg(feature = "fast_rebalance")]
    fn priv_remove_by_key<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
    {
        let mut path = Arena::<K, V, Idx, N>::new_idx_vec();
        let ngh = self.internal_get(Some(&mut path), key);
        self.priv_remove(Some(&path), ngh)
    }
840
    /// Standard BST deletion of the node described by `ngh`, returning its
    /// `(key, value)`. `opt_path` carries the access path and is used only
    /// under `fast_rebalance` to decrement cached subtree sizes (hence the
    /// `unused_variables` allow in the default build).
    #[allow(unused_variables)]
    fn priv_remove<U: SmallUnsigned + Default + Copy>(
        &mut self,
        opt_path: Option<&ArrayVec<[U; N]>>,
        ngh: NodeGetHelper<U>,
    ) -> Option<(K, V)> {
        match ngh.node_idx() {
            Some(node_idx) => {
                let node_to_remove = &self.arena[node_idx];

                // Copy out the child links before any mutation.
                let node_to_remove_left_idx = node_to_remove.left_idx();
                let mut node_to_remove_right_idx = node_to_remove.right_idx();

                // Determine which subtree replaces the removed node.
                let new_child = match (node_to_remove_left_idx, node_to_remove_right_idx) {
                    // Leaf: nothing replaces it.
                    (None, None) => None,
                    // One child: that child is promoted directly.
                    (Some(left_idx), None) => Some(left_idx),
                    (None, Some(right_idx)) => Some(right_idx),
                    // Two children: promote the in-order successor (minimum
                    // of the right subtree), after unlinking it below.
                    (Some(_), Some(right_idx)) => {
                        let mut min_idx = right_idx;
                        let mut min_parent_idx = node_idx;

                        #[cfg(feature = "fast_rebalance")]
                        let min_node_subtree_size = node_to_remove.subtree_size() - 1;

                        loop {
                            let min_node = &self.arena[min_idx];
                            match min_node.left_idx() {
                                // Keep descending left toward the successor.
                                Some(lt_idx) => {
                                    min_parent_idx = min_idx;
                                    min_idx = lt_idx;
                                }
                                None => match min_node.right_idx() {
                                    // Successor has a right child: splice the
                                    // child into the successor's old slot.
                                    Some(_) => {
                                        let unlink_new_child = min_node.right_idx();
                                        if min_parent_idx == node_idx {
                                            node_to_remove_right_idx = unlink_new_child;
                                        } else {
                                            let min_parent_node = &mut self.arena[min_parent_idx];
                                            min_parent_node.set_left_idx(unlink_new_child);

                                            #[cfg(feature = "fast_rebalance")]
                                            {
                                                min_parent_node.set_subtree_size(
                                                    min_parent_node.subtree_size() - 1,
                                                );
                                            }
                                        }
                                        break;
                                    }
                                    // Successor is a leaf: simply unlink it.
                                    None => {
                                        if min_parent_idx == node_idx {
                                            node_to_remove_right_idx = None;
                                        } else {
                                            let min_parent_node = &mut self.arena[min_parent_idx];
                                            min_parent_node.set_left_idx(None);

                                            #[cfg(feature = "fast_rebalance")]
                                            {
                                                min_parent_node.set_subtree_size(
                                                    min_parent_node.subtree_size() - 1,
                                                );
                                            }
                                        }
                                        break;
                                    }
                                },
                            }
                        }

                        // The successor adopts the removed node's children.
                        let min_node = &mut self.arena[min_idx];
                        min_node.set_right_idx(node_to_remove_right_idx);
                        min_node.set_left_idx(node_to_remove_left_idx);

                        #[cfg(feature = "fast_rebalance")]
                        {
                            min_node.set_subtree_size(min_node_subtree_size);
                        }

                        Some(min_idx)
                    }
                };

                // Re-link the removed node's parent (or the root pointer)
                // to the replacement subtree.
                match ngh.parent_idx() {
                    Some(parent_idx) => {
                        let parent_node = &mut self.arena[parent_idx];
                        if ngh.is_right_child() {
                            parent_node.set_right_idx(new_child);
                        } else {
                            parent_node.set_left_idx(new_child);
                        }
                    }
                    None => {
                        self.opt_root_idx = new_child;
                    }
                }

                // Free the arena slot and shrink the logical size.
                let mut removed_node = self.arena.hard_remove(node_idx);
                self.curr_size -= 1;

                // Refresh the min/max caches if an extreme was removed.
                if node_idx == self.min_idx {
                    self.update_min_idx();
                } else if node_idx == self.max_idx {
                    self.update_max_idx();
                }

                // `fast_rebalance`: decrement cached subtree sizes along the
                // recorded access path.
                #[cfg(feature = "fast_rebalance")]
                {
                    debug_assert!(opt_path.is_some());
                    if let Some(path) = opt_path {
                        for parent_idx in path {
                            let parent_node = &mut self.arena[(*parent_idx).usize()];
                            debug_assert!(parent_node.subtree_size() > 1);
                            parent_node.set_subtree_size(parent_node.subtree_size() - 1);
                        }
                    }
                }

                Some((removed_node.take_key(), removed_node.take_val()))
            }
            // Lookup missed: nothing to remove.
            None => None,
        }
    }
980
    /// Shared drain/filter core behind `retain` and `split_off`: removes
    /// every entry for which `pred` returns `true` and returns those entries
    /// as a new tree.
    fn priv_drain_filter<Q, F>(&mut self, mut pred: F) -> Self
    where
        K: Borrow<Q> + Ord,
        Q: Ord + ?Sized,
        F: FnMut(&Q, &mut V) -> bool,
    {
        let mut key_idxs = Arena::<K, V, Idx, N>::new_idx_vec();
        let mut remove_idxs = Arena::<K, V, Idx, N>::new_idx_vec();

        // Sort the arena first so in-order position maps cleanly to slots.
        self.sort_arena();

        // Record the arena index of every key, in in-order position.
        for (k, _) in &(*self) {
            let ngh: NodeGetHelper<Idx> = self.internal_get(None, k.borrow());
            debug_assert!(ngh.node_idx().is_some());
            key_idxs.push(Idx::checked_from(ngh.node_idx().unwrap()));
        }

        // Evaluate the predicate (mutable value access) for every entry.
        for (i, (k, v)) in self.iter_mut().enumerate() {
            if pred(k.borrow(), v) {
                remove_idxs.push(key_idxs[i]);
            }
        }

        // Move each selected entry into the drained tree.
        let mut drained_sgt = Self::new();
        for i in remove_idxs {
            if let Some((k, v)) = self.priv_remove_by_idx(i.usize()) {
                drained_sgt
                    .try_insert(k, v)
                    .expect("Stack-storage capacity exceeded!");
            }
        }

        drained_sgt
    }
1031
1032 fn update_min_idx(&mut self) {
1034 match self.opt_root_idx {
1035 Some(root_idx) => {
1036 let mut curr_idx = root_idx;
1037 loop {
1038 let node = &self.arena[curr_idx];
1039 match node.left_idx() {
1040 Some(lt_idx) => curr_idx = lt_idx,
1041 None => {
1042 self.min_idx = curr_idx;
1043 return;
1044 }
1045 }
1046 }
1047 }
1048 None => self.min_idx = 0,
1049 }
1050 }
1051
1052 fn update_max_idx(&mut self) {
1054 match self.opt_root_idx {
1055 Some(root_idx) => {
1056 let mut curr_idx = root_idx;
1057 loop {
1058 let node = &self.arena[curr_idx];
1059 match node.right_idx() {
1060 Some(gt_idx) => curr_idx = gt_idx,
1061 None => {
1062 self.max_idx = curr_idx;
1063 return;
1064 }
1065 }
1066 }
1067 }
1068 None => self.max_idx = 0,
1069 }
1070 }
1071
    /// Walk the insertion path bottom-up and return the arena index of the
    /// scapegoat node to rebuild, if any. Returns `None` only for trivially
    /// short paths.
    #[cfg(not(feature = "alt_impl"))]
    fn find_scapegoat<U: SmallUnsigned + Default>(&self, path: &[U]) -> Option<usize> {
        if path.len() <= 1 {
            return None;
        }

        // Start at the newly inserted leaf (subtree size 1) and its parent.
        let mut node_subtree_size = 1;
        let mut parent_path_idx = path.len() - 1;
        let mut parent_subtree_size = self.get_subtree_size::<U>(path[parent_path_idx].usize());

        // Climb while the weight-balance condition still holds, i.e. while
        // size(node) <= alpha * size(parent) with alpha = alpha_num/alpha_denom
        // (rewritten multiplicatively to avoid division).
        while (parent_path_idx > 0)
            && (self.alpha_denom * node_subtree_size as f32)
                <= (self.alpha_num * parent_subtree_size as f32)
        {
            node_subtree_size = parent_subtree_size;
            parent_path_idx -= 1;
            // Parent size = known child size + sibling subtree + 1; cheaper
            // than recounting the whole parent subtree each step.
            parent_subtree_size = self.get_subtree_size_differential::<U>(
                path[parent_path_idx].usize(),
                path[parent_path_idx + 1].usize(),
                node_subtree_size,
            );

            debug_assert!(parent_subtree_size > node_subtree_size);
        }

        Some(path[parent_path_idx].usize())
    }
1101
    /// Alternate scapegoat search (feature `alt_impl`): climbs a bounded
    /// number of levels (`alpha_balance_depth` of the running subtree size)
    /// instead of testing the weight-balance condition at each step.
    #[cfg(feature = "alt_impl")]
    fn find_scapegoat<U: SmallUnsigned + Default>(&self, path: &[U]) -> Option<usize> {
        if path.len() <= 1 {
            return None;
        }

        let mut i = 0;
        // Start at the newly inserted leaf (subtree size 1) and its parent.
        let mut node_subtree_size = 1;
        let mut parent_path_idx = path.len() - 1;
        let mut parent_subtree_size = self.get_subtree_size::<U>(path[parent_path_idx].usize());

        while (parent_path_idx > 0) && (i <= self.alpha_balance_depth(node_subtree_size)) {
            node_subtree_size = parent_subtree_size;
            parent_path_idx -= 1;
            i += 1;
            // Parent size = known child size + sibling subtree + 1.
            parent_subtree_size = self.get_subtree_size_differential::<U>(
                path[parent_path_idx].usize(),
                path[parent_path_idx + 1].usize(),
                node_subtree_size,
            );

            debug_assert!(parent_subtree_size > node_subtree_size);
        }

        Some(path[parent_path_idx].usize())
    }
1130
    /// Count the nodes in the subtree rooted at `idx` via iterative traversal
    /// — O(subtree size); used when `fast_rebalance` is disabled.
    #[cfg(not(feature = "fast_rebalance"))]
    fn get_subtree_size<U: SmallUnsigned + Default>(&self, idx: usize) -> usize {
        let mut subtree_worklist = array_vec![[U; N] => U::checked_from(idx)];
        let mut subtree_size = 0;

        while let Some(idx) = subtree_worklist.pop() {
            let node = &self.arena[idx.usize()];
            subtree_size += 1;

            if let Some(left_idx) = node.left_idx() {
                subtree_worklist.push(U::checked_from(left_idx));
            }

            if let Some(right_idx) = node.right_idx() {
                subtree_worklist.push(U::checked_from(right_idx));
            }
        }

        subtree_size
    }
1152
    /// O(1) subtree size read from the per-node cache (`fast_rebalance` only).
    #[cfg(feature = "fast_rebalance")]
    fn get_subtree_size<U: SmallUnsigned>(&self, idx: usize) -> usize {
        self.arena[idx].subtree_size()
    }
1158
    /// Compute a parent's subtree size from one child's already-known size:
    /// `parent = child + sibling subtree + 1`. Avoids re-traversing the
    /// known child's subtree while climbing in `find_scapegoat`.
    #[cfg(not(feature = "fast_rebalance"))]
    fn get_subtree_size_differential<U: SmallUnsigned + Default>(
        &self,
        parent_idx: usize,
        child_idx: usize,
        child_subtree_size: usize,
    ) -> usize {
        let parent = &self.arena[parent_idx];

        // `child_idx` must actually be a child of `parent_idx`.
        debug_assert!(
            (parent.right_idx() == Some(child_idx)) || (parent.left_idx() == Some(child_idx))
        );

        let mut is_right_child = false;
        if let Some(right_child_idx) = parent.right_idx() {
            if right_child_idx == child_idx {
                is_right_child = true;
            }
        }

        // Only the sibling subtree needs an actual traversal.
        let other_child_subtree_size = if is_right_child {
            match parent.left_idx() {
                Some(idx) => self.get_subtree_size::<U>(idx),
                None => 0,
            }
        } else {
            match parent.right_idx() {
                Some(idx) => self.get_subtree_size::<U>(idx),
                None => 0,
            }
        };

        let computed_subtree_size = child_subtree_size + other_child_subtree_size + 1;

        // Debug builds cross-check against a full recount.
        debug_assert_eq!(
            computed_subtree_size,
            self.get_subtree_size::<U>(parent_idx)
        );

        computed_subtree_size
    }
1201
    /// `fast_rebalance` variant: the cached size is already O(1), so the
    /// differential shortcut is unnecessary — read the parent's cache.
    #[cfg(feature = "fast_rebalance")]
    fn get_subtree_size_differential<U: SmallUnsigned>(
        &self,
        parent_idx: usize,
        _child_idx: usize,
        _child_subtree_size: usize,
    ) -> usize {
        self.get_subtree_size::<U>(parent_idx)
    }
1213
    /// Rebuild the subtree rooted at `idx` into perfect balance and bump the
    /// rebalance counter (wrapping add, so the counter never overflows).
    fn rebuild<U: Copy + Ord + Sub + SmallUnsigned + Default>(&mut self, idx: usize) {
        let sorted_sub = self.flatten_subtree_to_sorted_idxs(idx);
        self.rebalance_subtree_from_sorted_idxs::<U>(idx, &sorted_sub);
        self.rebal_cnt = self.rebal_cnt.wrapping_add(1);
    }
1220
    /// Rebuild a subtree to perfect balance given its nodes' arena indexes in
    /// key-sorted order. Iterative binary split: each range's median becomes
    /// a node, its halves become that node's children (driven by a worklist).
    fn rebalance_subtree_from_sorted_idxs<U: Copy + Ord + Default + Sub + SmallUnsigned>(
        &mut self,
        old_subtree_root_idx: usize,
        sorted_arena_idxs: &[usize],
    ) {
        // Zero or one node is already balanced.
        if sorted_arena_idxs.len() <= 1 {
            return;
        }

        debug_assert!(
            self.opt_root_idx.is_some(),
            "Internal invariant failed: rebalance of multi-node tree without root!"
        );

        // The median of the sorted slice becomes the new subtree root.
        let sorted_last_idx = sorted_arena_idxs.len() - 1;
        let subtree_root_sorted_idx = sorted_last_idx / 2;
        let subtree_root_arena_idx = sorted_arena_idxs[subtree_root_sorted_idx];
        let mut subtree_worklist = ArrayVec::<[(U, NodeRebuildHelper<U>); N]>::default();

        // Seed the worklist with the new root and the full sorted range.
        subtree_worklist.push((
            U::checked_from(subtree_root_sorted_idx),
            NodeRebuildHelper::new(0, sorted_last_idx),
        ));

        // Attach the new subtree root: either it replaces the overall tree
        // root, or the old subtree root's parent is re-pointed to it.
        if let Some(root_idx) = self.opt_root_idx {
            if sorted_arena_idxs.contains(&root_idx) {
                self.opt_root_idx = Some(subtree_root_arena_idx);
            } else {
                let old_subtree_root = &self.arena[old_subtree_root_idx];
                let ngh: NodeGetHelper<U> = self.internal_get(None, old_subtree_root.key());
                debug_assert!(
                    ngh.parent_idx().is_some(),
                    "Internal invariant failed: rebalance of non-root parent-less node!"
                );
                if let Some(parent_idx) = ngh.parent_idx() {
                    let parent_node = &mut self.arena[parent_idx];
                    if ngh.is_right_child() {
                        parent_node.set_right_idx(Some(subtree_root_arena_idx));
                    } else {
                        parent_node.set_left_idx(Some(subtree_root_arena_idx));
                    }
                }
            }
        }

        // Process ranges: each popped node links the median of each half of
        // its range as a child, then pushes those halves for processing.
        while let Some((sorted_idx, parent_nrh)) = subtree_worklist.pop() {
            let parent_node = &mut self.arena[sorted_arena_idxs[sorted_idx.usize()]];

            // Start detached; children re-attached below where ranges remain.
            parent_node.set_left_idx(None);
            parent_node.set_right_idx(None);

            // Left half of the range -> left child.
            if parent_nrh.low_idx < parent_nrh.mid_idx {
                let child_nrh: NodeRebuildHelper<U> = NodeRebuildHelper::new(
                    parent_nrh.low_idx.usize(),
                    parent_nrh.mid_idx.usize() - 1,
                );
                parent_node.set_left_idx(Some(sorted_arena_idxs[child_nrh.mid_idx.usize()]));
                subtree_worklist.push((child_nrh.mid_idx, child_nrh));
            }

            // Right half of the range -> right child.
            if parent_nrh.mid_idx < parent_nrh.high_idx {
                let child_nrh: NodeRebuildHelper<U> = NodeRebuildHelper::new(
                    parent_nrh.mid_idx.usize() + 1,
                    parent_nrh.high_idx.usize(),
                );
                parent_node.set_right_idx(Some(sorted_arena_idxs[child_nrh.mid_idx.usize()]));
                subtree_worklist.push((child_nrh.mid_idx, child_nrh));
            }

            // `fast_rebalance`: the cached subtree size is the range length.
            #[cfg(feature = "fast_rebalance")]
            {
                parent_node
                    .set_subtree_size(parent_nrh.high_idx.usize() - parent_nrh.low_idx.usize() + 1);
                debug_assert!(parent_node.subtree_size() >= 1);
            }
        }

        debug_assert!(
            self.get_subtree_size::<U>(subtree_root_arena_idx) == (sorted_arena_idxs.len()),
            "Internal invariant failed: rebalance changed node count! {} -> {}",
            self.get_subtree_size::<U>(subtree_root_arena_idx),
            sorted_arena_idxs.len()
        );
    }
1313
    /// Depth bound for an alpha-weight-balanced tree of `val` nodes:
    /// `floor(log_{1/alpha}(val))`, with `alpha = alpha_num / alpha_denom`
    /// (so the log base is `alpha_denom / alpha_num`).
    fn alpha_balance_depth(&self, val: usize) -> usize {
        (val as f32).log(self.alpha_denom / self.alpha_num).floor() as usize
    }
1319}
1320
impl<K, V, const N: usize> Debug for SgTree<K, V, N>
where
    K: Ord + Debug + Default,
    V: Debug + Default,
{
    /// Format as a map (e.g. `{1: "a", 2: "b"}`) in ascending key order.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
1333
impl<K, V, const N: usize> Default for SgTree<K, V, N>
where
    K: Ord + Default,
    V: Default,
{
    /// An empty tree, equivalent to `SgTree::new()`.
    fn default() -> Self {
        Self::new()
    }
}
1344
impl<K, V, const N: usize> From<[(K, V); N]> for SgTree<K, V, N>
where
    K: Ord + Default,
    V: Default,
{
    /// Build a tree from an array of `(key, value)` pairs.
    fn from(arr: [(K, V); N]) -> Self {
        // Fully-qualified call selects the by-value array iterator rather
        // than the auto-ref slice iterator on pre-2021 editions.
        IntoIterator::into_iter(arr).collect()
    }
}
1355
impl<K, V, Q, const N: usize> Index<&Q> for SgTree<K, V, N>
where
    K: Borrow<Q> + Ord + Default,
    Q: Ord + ?Sized,
    V: Default,
{
    type Output = V;

    /// Indexing by key: `tree[&key]`.
    ///
    /// # Panics
    /// Panics if `key` is not present in the tree.
    fn index(&self, key: &Q) -> &Self::Output {
        self.get(key).expect("No value found for key")
    }
}
1403
impl<K, V, const N: usize> Extend<(K, V)> for SgTree<K, V, N>
where
    K: Ord + Default,
    V: Default,
{
    /// Insert every pair from `iter`.
    ///
    /// # Panics
    /// Panics (via `expect`) if an insert fails because the fixed stack
    /// capacity is exhausted.
    fn extend<I: IntoIterator<Item = (K, V)>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |(k, v)| {
            self.try_insert(k, v)
                .expect("Stack-storage capacity exceeded!");
        });
    }
}
1417
impl<'a, K, V, const N: usize> Extend<(&'a K, &'a V)> for SgTree<K, V, N>
where
    K: Ord + Copy + Default,
    V: Copy + Default,
{
    /// Insert every referenced pair by copying it, delegating to the
    /// owned-pair `Extend` implementation (shares its panic behavior).
    fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
        self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
    }
}
1428
impl<K, V, const N: usize> PartialEq for SgTree<K, V, N>
where
    K: Ord + PartialEq + Default,
    V: PartialEq + Default,
{
    /// Trees are equal when they contain the same `(key, value)` pairs;
    /// the length pre-check short-circuits the common unequal case.
    fn eq(&self, other: &SgTree<K, V, N>) -> bool {
        self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
    }
}
1439
// Full equivalence follows from the `PartialEq` impl when both `K` and `V`
// are themselves `Eq` (marker impl, no methods).
impl<K, V, const N: usize> Eq for SgTree<K, V, N>
where
    K: Ord + Eq + Default,
    V: Eq + Default,
{
}
1447
impl<K, V, const N: usize> PartialOrd for SgTree<K, V, N>
where
    K: Ord + PartialOrd + Default,
    V: PartialOrd + Default,
{
    /// Lexicographic comparison over `(key, value)` pairs in key order.
    fn partial_cmp(&self, other: &SgTree<K, V, N>) -> Option<Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}
1458
impl<K, V, const N: usize> Ord for SgTree<K, V, N>
where
    K: Ord + Default,
    V: Ord + Default,
{
    /// Total lexicographic comparison over `(key, value)` pairs in key order.
    fn cmp(&self, other: &SgTree<K, V, N>) -> Ordering {
        self.iter().cmp(other.iter())
    }
}
1469
impl<K, V, const N: usize> Hash for SgTree<K, V, N>
where
    K: Ord + Hash + Default,
    V: Hash + Default,
{
    /// Hash each `(key, value)` pair in ascending key order, so equal trees
    /// hash equally regardless of insertion order.
    fn hash<H: Hasher>(&self, state: &mut H) {
        for i in self {
            i.hash(state);
        }
    }
}
1482
impl<K, V, const N: usize> FromIterator<(K, V)> for SgTree<K, V, N>
where
    K: Ord + Default,
    V: Default,
{
    /// Build a tree from `(key, value)` pairs (later duplicates overwrite
    /// earlier ones).
    ///
    /// # Panics
    /// Panics (via `expect`) if the pairs exceed the fixed stack capacity.
    fn from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self {
        let mut sgt = SgTree::new();

        for (k, v) in iter {
            sgt.try_insert(k, v)
                .expect("Stack-storage capacity exceeded!");
        }

        sgt
    }
}
1502
impl<'a, K, V, const N: usize> IntoIterator for &'a mut SgTree<K, V, N>
where
    K: Ord + Default,
    V: Default,
{
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V, N>;

    /// Mutable-value iteration in ascending key order (`for (k, v) in &mut t`).
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
1516
impl<'a, K, V, const N: usize> IntoIterator for &'a SgTree<K, V, N>
where
    K: Ord + Default,
    V: Default,
{
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V, N>;

    /// Shared-reference iteration in ascending key order (`for (k, v) in &t`).
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
1530
impl<K, V, const N: usize> IntoIterator for SgTree<K, V, N>
where
    K: Ord + Default,
    V: Default,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V, N>;

    /// Consuming iteration over owned `(key, value)` pairs in key order.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}