#![cfg_attr(
    test,
    deny(
        missing_docs,
        future_incompatible,
        nonstandard_style,
        rust_2018_idioms,
        missing_copy_implementations,
        trivial_casts,
        trivial_numeric_casts,
        unused_qualifications,
    )
)]
#![cfg_attr(test, deny(
    clippy::cast_lossless,
    clippy::cast_possible_truncation,
    clippy::cast_possible_wrap,
    clippy::cast_precision_loss,
    clippy::cast_sign_loss,
    clippy::decimal_literal_representation,
    clippy::doc_markdown,
    clippy::empty_enum,
    clippy::explicit_into_iter_loop,
    clippy::explicit_iter_loop,
    clippy::expl_impl_clone_on_copy,
    clippy::fallible_impl_from,
    clippy::filter_map_next,
    clippy::float_arithmetic,
    clippy::get_unwrap,
    clippy::if_not_else,
    clippy::indexing_slicing,
    clippy::inline_always,
    clippy::integer_arithmetic,
    clippy::invalid_upcast_comparisons,
    clippy::items_after_statements,
    clippy::manual_find_map,
    clippy::map_entry,
    clippy::map_flatten,
    clippy::match_like_matches_macro,
    clippy::match_same_arms,
    clippy::maybe_infinite_iter,
    clippy::mem_forget,
    clippy::module_name_repetitions,
    clippy::multiple_inherent_impl,
    clippy::mut_mut,
    clippy::needless_borrow,
    clippy::needless_continue,
    clippy::needless_pass_by_value,
    clippy::non_ascii_literal,
    clippy::path_buf_push_overwrite,
    clippy::redundant_closure_for_method_calls,
    clippy::shadow_reuse,
    clippy::shadow_same,
    clippy::shadow_unrelated,
    clippy::single_match_else,
    clippy::string_add,
    clippy::string_add_assign,
    clippy::type_repetition_in_bounds,
    clippy::unicode_not_nfc,
    clippy::unimplemented,
    clippy::unseparated_literal_suffix,
    clippy::used_underscore_binding,
    clippy::wildcard_dependencies,
))]
#![cfg_attr(
    test,
    warn(
        clippy::missing_const_for_fn,
        clippy::multiple_crate_versions,
        clippy::wildcard_enum_match_arm,
    )
)]

#[cfg(feature = "serde")]
mod serde;

#[cfg(not(feature = "fault_injection"))]
#[inline]
const fn debug_delay() -> bool {
    false
}

/// With the `fault_injection` feature enabled, this hook occasionally
/// yields the scheduler to shake out concurrency bugs under test.
#[cfg(feature = "fault_injection")]
fn debug_delay() -> bool {
    use std::thread;

    use rand::{thread_rng, Rng};

    let mut rng = thread_rng();

    match rng.gen_range(0..100) {
        0..=98 => false,
        _ => {
            thread::yield_now();
            true
        }
    }
}

use stack_map::StackMap;

use std::borrow::Borrow;
use std::fmt;
use std::mem::size_of;
use std::num::{
    NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
    NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize,
};
use std::ops::{Bound, Deref};
use std::ptr::NonNull;
use std::sync::{
    atomic::{AtomicPtr, AtomicUsize, Ordering},
    Arc,
};

#[cfg(feature = "timing")]
use std::sync::atomic::AtomicU64;
#[cfg(feature = "timing")]
use std::time::{Duration, Instant};

use ebr::{Ebr, Guard};

// Nodes containing at most this many items are merged into a left sibling.
const MERGE_SIZE: usize = 1;

/// Objects whose physical reclamation must be deferred through EBR
/// until no concurrent reader can still hold a reference to them.
#[derive(Debug)]
enum Deferred<
    K: 'static + Clone + Minimum + Send + Sync + Ord,
    V: 'static + Clone + Send + Sync,
    const FANOUT: usize,
> {
    #[allow(unused)]
    Node(Box<Node<K, V, FANOUT>>),
    BoxedAtomicPtr(BoxedAtomicPtr<K, V, FANOUT>),
}

impl<
        K: 'static + Clone + Minimum + Send + Sync + Ord,
        V: 'static + Clone + Send + Sync,
        const FANOUT: usize,
    > Drop for Deferred<K, V, FANOUT>
{
    fn drop(&mut self) {
        if let Deferred::BoxedAtomicPtr(id) = self {
            assert!(!id.0.is_null());
            let reclaimed: Box<AtomicPtr<Node<K, V, FANOUT>>> =
                unsafe { Box::from_raw(id.0 as *mut _) };
            drop(reclaimed);
        }
    }
}

/// A heap-allocated `AtomicPtr` to a node. The allocation's address
/// doubles as the node's stable identity while its contents change.
#[derive(Debug, Clone, Eq)]
struct BoxedAtomicPtr<
    K: 'static + Clone + Minimum + Send + Sync + Ord,
    V: 'static + Clone + Send + Sync,
    const FANOUT: usize,
>(*const AtomicPtr<Node<K, V, FANOUT>>);

impl<
        K: 'static + Clone + Minimum + Send + Sync + Ord,
        V: 'static + Clone + Send + Sync,
        const FANOUT: usize,
    > Copy for BoxedAtomicPtr<K, V, FANOUT>
{
}

impl<
        K: 'static + Clone + Minimum + Send + Sync + Ord,
        V: 'static + Clone + Send + Sync,
        const FANOUT: usize,
    > PartialEq for BoxedAtomicPtr<K, V, FANOUT>
{
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}

unsafe impl<
        K: 'static + Clone + Minimum + Send + Sync + Ord,
        V: 'static + Clone + Send + Sync,
        const FANOUT: usize,
    > Send for BoxedAtomicPtr<K, V, FANOUT>
{
}

unsafe impl<
        K: 'static + Clone + Minimum + Send + Sync + Ord,
        V: 'static + Clone + Send + Sync,
        const FANOUT: usize,
    > Sync for BoxedAtomicPtr<K, V, FANOUT>
{
}

impl<
        K: 'static + Clone + Minimum + Send + Sync + Ord,
        V: 'static + Clone + Send + Sync,
        const FANOUT: usize,
    > Deref for BoxedAtomicPtr<K, V, FANOUT>
{
    type Target = AtomicPtr<Node<K, V, FANOUT>>;

    fn deref(&self) -> &AtomicPtr<Node<K, V, FANOUT>> {
        unsafe { &*self.0 }
    }
}

impl<
        K: 'static + Clone + Minimum + Send + Sync + Ord,
        V: 'static + Clone + Send + Sync,
        const FANOUT: usize,
    > BoxedAtomicPtr<K, V, FANOUT>
{
    fn new(node: Box<Node<K, V, FANOUT>>) -> BoxedAtomicPtr<K, V, FANOUT> {
        let pointee_ptr = Box::into_raw(node);
        let pointer_ptr = Box::into_raw(Box::new(AtomicPtr::new(pointee_ptr)));
        BoxedAtomicPtr(pointer_ptr)
    }

    fn node_view<const LOCAL_GC_BUFFER_SIZE: usize>(
        &self,
        _guard: &mut Guard<'_, Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    ) -> Option<NodeView<K, V, FANOUT>> {
        let ptr = NonNull::new(self.load(Ordering::Acquire))?;

        Some(NodeView { ptr, id: *self })
    }
}

/// Returned from the `cas` method of `ConcurrentMap` when the
/// operation failed to install the proposed new value.
#[derive(Debug, PartialEq, Eq)]
pub struct CasFailure<V> {
    /// The value that was actually present at the time of the
    /// failed compare-and-swap.
    pub actual: Option<V>,
    /// Returns ownership of the proposed new value that was
    /// not installed.
    pub returned_new_value: Option<V>,
}

#[derive(Debug)]
struct NodeView<K, V, const FANOUT: usize>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    ptr: NonNull<Node<K, V, FANOUT>>,
    id: BoxedAtomicPtr<K, V, FANOUT>,
}

impl<K, V, const FANOUT: usize> NodeView<K, V, FANOUT>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    /// Attempts to atomically replace this view's node with `replacement`.
    /// On success, returns a view of the new node. On failure, returns a
    /// view of the actual current node, or `None` if the node has been
    /// uninstalled entirely.
    fn cas<const LOCAL_GC_BUFFER_SIZE: usize>(
        &self,
        replacement: Box<Node<K, V, FANOUT>>,
        guard: &mut Guard<'_, Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    ) -> Result<NodeView<K, V, FANOUT>, Option<NodeView<K, V, FANOUT>>> {
        assert!(
            !(replacement.hi.is_some() ^ replacement.next.is_some()),
            "hi and next must either both be None or both be Some"
        );

        if debug_delay() {
            return Err(Some(NodeView {
                ptr: self.ptr,
                id: self.id,
            }));
        }

        let replacement_ptr = Box::into_raw(replacement);
        let res = self.id.compare_exchange(
            self.ptr.as_ptr(),
            replacement_ptr,
            Ordering::AcqRel,
            Ordering::Acquire,
        );

        match res {
            Ok(_) => {
                let replaced: Box<Node<K, V, FANOUT>> = unsafe { Box::from_raw(self.ptr.as_ptr()) };
                guard.defer_drop(Deferred::Node(replaced));
                Ok(NodeView {
                    ptr: NonNull::new(replacement_ptr).unwrap(),
                    id: self.id,
                })
            }
            Err(actual) => {
                let failed_value: Box<Node<K, V, FANOUT>> =
                    unsafe { Box::from_raw(replacement_ptr) };
                drop(failed_value);

                if actual.is_null() {
                    Err(None)
                } else {
                    Err(Some(NodeView {
                        ptr: NonNull::new(actual).unwrap(),
                        id: self.id,
                    }))
                }
            }
        }
    }

    unsafe fn get_mut(&mut self) -> &mut Node<K, V, FANOUT> {
        self.ptr.as_mut()
    }
}

impl<K, V, const FANOUT: usize> Deref for NodeView<K, V, FANOUT>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    type Target = Node<K, V, FANOUT>;

    fn deref(&self) -> &Self::Target {
        unsafe { self.ptr.as_ref() }
    }
}

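/// A trait for types that have a least possible value, used as the
/// low key bound of the tree's leftmost nodes.
///
/// A minimal sketch of an implementation for a custom key type
/// (illustrative; the doctest path assumes this crate is built under
/// the name `concurrent_map`):
///
/// ```
/// use concurrent_map::Minimum;
///
/// #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
/// struct Celsius(i32);
///
/// impl Minimum for Celsius {
///     const MIN: Self = Celsius(i32::MIN);
/// }
/// ```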
pub trait Minimum: Ord {
    /// The least possible value for this type.
    const MIN: Self;
}

/// The dual of [`Minimum`], used here to implement `Minimum` for
/// [`std::cmp::Reverse`].
pub trait Maximum: Ord {
    /// The greatest possible value for this type.
    const MAX: Self;
}

impl Minimum for () {
    const MIN: Self = ();
}

impl Minimum for bool {
    const MIN: Self = false;
}

impl<T: Maximum> Minimum for std::cmp::Reverse<T> {
    const MIN: Self = std::cmp::Reverse(T::MAX);
}

macro_rules! impl_integer {
    ($($t:ty),+) => {
        $(
            impl Minimum for $t {
                const MIN: Self = <$t>::MIN;
            }

            impl Maximum for $t {
                const MAX: Self = <$t>::MAX;
            }
        )*
    }
}

impl_integer!(
    usize,
    u8,
    u16,
    u32,
    u64,
    u128,
    isize,
    i8,
    i16,
    i32,
    i64,
    i128,
    NonZeroI128,
    NonZeroI16,
    NonZeroI32,
    NonZeroI64,
    NonZeroI8,
    NonZeroIsize,
    NonZeroU128,
    NonZeroU16,
    NonZeroU32,
    NonZeroU64,
    NonZeroU8,
    NonZeroUsize
);

impl<T: Ord> Minimum for Vec<T> {
    const MIN: Self = Vec::new();
}

impl<T: Ord> Minimum for &[T] {
    const MIN: Self = &[];
}

impl<T: Minimum, const LEN: usize> Minimum for [T; LEN] {
    const MIN: Self = [T::MIN; LEN];
}

impl Minimum for String {
    const MIN: Self = String::new();
}

impl Minimum for &str {
    const MIN: Self = "";
}

impl<A: Minimum, B: Minimum> Minimum for (A, B) {
    const MIN: Self = (A::MIN, B::MIN);
}
impl<A: Minimum, B: Minimum, C: Minimum> Minimum for (A, B, C) {
    const MIN: Self = (A::MIN, B::MIN, C::MIN);
}
impl<A: Minimum, B: Minimum, C: Minimum, D: Minimum> Minimum for (A, B, C, D) {
    const MIN: Self = (A::MIN, B::MIN, C::MIN, D::MIN);
}
impl<A: Minimum, B: Minimum, C: Minimum, D: Minimum, E: Minimum> Minimum for (A, B, C, D, E) {
    const MIN: Self = (A::MIN, B::MIN, C::MIN, D::MIN, E::MIN);
}
impl<A: Minimum, B: Minimum, C: Minimum, D: Minimum, E: Minimum, F: Minimum> Minimum
    for (A, B, C, D, E, F)
{
    const MIN: Self = (A::MIN, B::MIN, C::MIN, D::MIN, E::MIN, F::MIN);
}

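/// A lock-free ordered map, structured as a B+ tree with a
/// compile-time `FANOUT` and a per-thread epoch-based reclamation
/// buffer of `LOCAL_GC_BUFFER_SIZE` items.
///
/// A minimal usage sketch (illustrative; the doctest path assumes
/// this crate is built under the name `concurrent_map`):
///
/// ```
/// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
///
/// // the map is cheap to clone and can be shared across threads
/// map.insert(0, 1);
/// assert_eq!(map.get(&0), Some(1));
/// ```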
#[derive(Clone)]
pub struct ConcurrentMap<K, V, const FANOUT: usize = 64, const LOCAL_GC_BUFFER_SIZE: usize = 128>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    // epoch-based reclamation for deferred frees
    ebr: Ebr<Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    // the tree itself, shared between all clones of this handle
    inner: Arc<Inner<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>>,
    // an approximate count of the items in the map
    len: Arc<AtomicUsize>,
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize> PartialEq
    for ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + fmt::Debug + Clone + Minimum + Ord + Send + Sync + PartialEq,
    V: 'static + fmt::Debug + Clone + Send + Sync + PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        let literally_the_same = Arc::as_ptr(&self.inner) == Arc::as_ptr(&other.inner);
        if literally_the_same {
            return true;
        }

        let self_iter = self.iter();
        let mut other_iter = other.iter();

        for self_kv in self_iter {
            let other_kv = other_iter.next();
            if !Some(self_kv).eq(&other_kv) {
                return false;
            }
        }

        other_iter.next().is_none()
    }
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize> fmt::Debug
    for ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + fmt::Debug + Clone + Minimum + Ord + Send + Sync,
    V: 'static + fmt::Debug + Clone + Send + Sync,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("ConcurrentMap ")?;
        f.debug_map().entries(self.iter()).finish()
    }
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize> Default
    for ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    fn default() -> ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE> {
        assert!(FANOUT > 3, "ConcurrentMap FANOUT must be greater than 3");
        assert!(
            LOCAL_GC_BUFFER_SIZE > 0,
            "LOCAL_GC_BUFFER_SIZE must be greater than 0"
        );
        let mut root_node = Node::<K, V, FANOUT>::new_root();
        let root_node_lo = root_node.lo.clone();
        let leaf_node = Node::<K, V, FANOUT>::new_leaf(root_node.lo.clone());
        let leaf = BoxedAtomicPtr::new(leaf_node);
        root_node.index_mut().insert(root_node_lo, leaf);

        let root = BoxedAtomicPtr::new(root_node);

        let inner = Arc::new(Inner {
            root,
            #[cfg(feature = "timing")]
            slowest_op: u64::MIN.into(),
            #[cfg(feature = "timing")]
            fastest_op: u64::MAX.into(),
        });

        ConcurrentMap {
            ebr: Ebr::default(),
            inner,
            len: Arc::new(0.into()),
        }
    }
}

struct Inner<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    root: BoxedAtomicPtr<K, V, FANOUT>,
    #[cfg(feature = "timing")]
    slowest_op: AtomicU64,
    #[cfg(feature = "timing")]
    fastest_op: AtomicU64,
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize> Drop
    for Inner<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    fn drop(&mut self) {
        #[cfg(feature = "timing")]
        self.print_timing();

        let ebr = Ebr::default();
        let mut guard = ebr.pin();

        // descend the leftmost edge of the tree, recording the head of
        // each level so that every level can be walked and freed
        // left-to-right below.
        let mut cursor: NodeView<K, V, FANOUT> = self.root(&mut guard);

        let mut lhs_chain: Vec<BoxedAtomicPtr<K, V, FANOUT>> = vec![];

        loop {
            lhs_chain.push(cursor.id);
            if cursor.is_leaf() {
                break;
            }
            let child_ptr: BoxedAtomicPtr<K, V, FANOUT> = cursor.index().get_index(0).unwrap().1;

            cursor = child_ptr.node_view(&mut guard).unwrap();
        }

        let mut layer = 0;
        for lhs_ptr in lhs_chain {
            layer += 1;

            let mut min_fill_physical: f64 = 1.0;
            let mut max_fill_physical: f64 = 0.0;
            let mut fill_sum_physical: f64 = 0.0;

            let mut min_fill_logical: f64 = 1.0;
            let mut max_fill_logical: f64 = 0.0;
            let mut fill_sum_logical: f64 = 0.0;
            let mut nodes_counted: usize = 0;

            let mut next_opt: Option<BoxedAtomicPtr<K, V, FANOUT>> = Some(lhs_ptr);
            while let Some(next) = next_opt {
                assert!(!next.0.is_null());
                let sibling_cursor = next.node_view(&mut guard).unwrap();

                let fill_phy = ((size_of::<K>() + size_of::<V>()) * sibling_cursor.len()) as f64
                    / size_of::<Node<K, V, FANOUT>>() as f64;
                min_fill_physical = min_fill_physical.min(fill_phy);
                max_fill_physical = max_fill_physical.max(fill_phy);
                fill_sum_physical += fill_phy;

                let fill_log = sibling_cursor.len() as f64 / FANOUT as f64;
                min_fill_logical = min_fill_logical.min(fill_log);
                max_fill_logical = max_fill_logical.max(fill_log);
                fill_sum_logical += fill_log;
                nodes_counted += 1;

                next_opt = sibling_cursor.next;
                let node_box = unsafe { Box::from_raw(sibling_cursor.ptr.as_ptr()) };
                drop(node_box);

                let reclaimed_ptr: Box<AtomicPtr<Node<K, V, FANOUT>>> =
                    unsafe { Box::from_raw(next.0 as *mut _) };
                drop(reclaimed_ptr);
            }

            if cfg!(feature = "print_utilization_on_drop") {
                println!("layer {layer} count {nodes_counted}");
                println!(
                    "logical: min: {min_fill_logical} max: {max_fill_logical} avg: {}",
                    fill_sum_logical / nodes_counted as f64
                );
                println!(
                    "physical: min: {min_fill_physical} max: {max_fill_physical} avg: {}",
                    fill_sum_physical / nodes_counted as f64
                );
            }
        }
    }
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize>
    ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    /// Creates a new, empty `ConcurrentMap`. Equivalent to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }

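    /// Atomically gets the value associated with `key`, if any.
    ///
    /// A minimal usage sketch (illustrative; the doctest path assumes
    /// this crate is built under the name `concurrent_map`):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
    /// map.insert(1, 10);
    /// assert_eq!(map.get(&1), Some(10));
    /// assert_eq!(map.get(&2), None);
    /// ```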
    pub fn get<Q>(&self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let mut guard = self.ebr.pin();

        let leaf = self.inner.leaf_for_key(LeafSearch::Eq(key), &mut guard);

        leaf.get(key)
    }

    pub fn contains_key<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.get(key).is_some()
    }

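    /// Returns the greatest key-value pair whose key is strictly less
    /// than the given key, if any (illustrative sketch):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
    /// map.insert(1, 10);
    /// map.insert(3, 30);
    /// assert_eq!(map.get_lt(&3), Some((1, 10)));
    /// assert_eq!(map.get_lt(&1), None);
    /// ```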
    pub fn get_lt<Q>(&self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: ?Sized + Ord + PartialEq,
    {
        if key == K::MIN.borrow() {
            return None;
        }

        let start = Bound::Unbounded;
        let end = Bound::Excluded(key);
        self.range((start, end)).next_back()
    }

    pub fn get_lte<Q>(&self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: ?Sized + Ord + PartialEq,
    {
        let mut guard = self.ebr.pin();
        let end = LeafSearch::Eq(key);
        let current_back = self.inner.leaf_for_key(end, &mut guard);

        if let Some((k, v)) = current_back.leaf().get_less_than_or_equal(key) {
            return Some((k.clone(), v.clone()));
        }

        let current = self
            .inner
            .leaf_for_key(LeafSearch::Eq(K::MIN.borrow()), &mut guard);

        Iter {
            guard,
            inner: &self.inner,
            range: (Bound::Unbounded, Bound::Included(key)),
            current,
            current_back,
            next_index: 0,
            next_index_from_back: 0,
            q: std::marker::PhantomData,
        }
        .next_back()
    }

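    /// Returns the least key-value pair whose key is strictly greater
    /// than the given key, if any (illustrative sketch):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
    /// map.insert(1, 10);
    /// map.insert(3, 30);
    /// assert_eq!(map.get_gt(&1), Some((3, 30)));
    /// assert_eq!(map.get_gt(&3), None);
    /// ```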
    pub fn get_gt<Q>(&self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: ?Sized + Ord + PartialEq,
    {
        self.range((Bound::Excluded(key), Bound::Unbounded)).next()
    }

    pub fn get_gte<Q>(&self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: ?Sized + Ord + PartialEq,
    {
        self.range((Bound::Included(key), Bound::Unbounded)).next()
    }

    pub fn first(&self) -> Option<(K, V)> {
        self.iter().next()
    }

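    /// Atomically removes and returns the first key-value pair,
    /// retrying if a concurrent writer races with the removal
    /// (illustrative sketch):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
    /// map.insert(1, 10);
    /// map.insert(2, 20);
    /// assert_eq!(map.pop_first(), Some((1, 10)));
    /// assert_eq!(map.pop_first(), Some((2, 20)));
    /// assert_eq!(map.pop_first(), None);
    /// ```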
    pub fn pop_first(&self) -> Option<(K, V)>
    where
        V: PartialEq,
    {
        loop {
            let (k, v) = self.first()?;
            if self.cas(k.clone(), Some(&v), None).is_ok() {
                return Some((k, v));
            }
        }
    }

    pub fn pop_first_in_range<Q, R>(&self, range: R) -> Option<(K, V)>
    where
        R: std::ops::RangeBounds<Q> + Clone,
        K: Borrow<Q>,
        V: PartialEq,
        Q: ?Sized + Ord + PartialEq,
    {
        loop {
            let mut r = self.range(range.clone());
            let (k, v) = r.next()?;
            if self.cas(k.clone(), Some(&v), None).is_ok() {
                return Some((k, v));
            }
        }
    }

    pub fn last(&self) -> Option<(K, V)> {
        self.iter().next_back()
    }

    pub fn pop_last(&self) -> Option<(K, V)>
    where
        V: PartialEq,
    {
        loop {
            let (k, v) = self.last()?;
            if self.cas(k.clone(), Some(&v), None).is_ok() {
                return Some((k, v));
            }
        }
    }

    pub fn pop_last_in_range<Q, R>(&self, range: R) -> Option<(K, V)>
    where
        R: std::ops::RangeBounds<Q> + Clone,
        K: Borrow<Q>,
        V: PartialEq,
        Q: ?Sized + Ord + PartialEq,
    {
        loop {
            let mut r = self.range(range.clone());
            let (k, v) = r.next_back()?;
            if self.cas(k.clone(), Some(&v), None).is_ok() {
                return Some((k, v));
            }
        }
    }

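    /// Atomically inserts a key-value pair, returning the previous
    /// value if one was present (illustrative sketch):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<&str, usize>::default();
    /// assert_eq!(map.insert("key", 1), None);
    /// assert_eq!(map.insert("key", 2), Some(1));
    /// ```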
    pub fn insert(&self, key: K, value: V) -> Option<V> {
        // If this is the only live handle to the tree, no other thread
        // can observe intermediate states, so the leaf may be mutated
        // in place instead of going through the CAS loop below.
        let strong_count = Arc::strong_count(&self.inner);
        let direct_mutations_safe = strong_count == 1;

        if direct_mutations_safe && !debug_delay() {
            let mut guard = self.ebr.pin();
            let mut leaf = self.inner.leaf_for_key(LeafSearch::Eq(&key), &mut guard);
            let node_mut_ref: &mut Node<K, V, FANOUT> = unsafe { leaf.get_mut() };
            assert!(!node_mut_ref.should_split(), "bad leaf: should split");

            let ret = node_mut_ref.insert(key, value);

            if node_mut_ref.should_split() {
                node_mut_ref.split();
            }

            if ret.is_none() {
                self.len.fetch_add(1, Ordering::Relaxed);
            }

            return ret;
        }

        loop {
            let mut guard = self.ebr.pin();
            let leaf = self.inner.leaf_for_key(LeafSearch::Eq(&key), &mut guard);

            let mut leaf_clone: Box<Node<K, V, FANOUT>> = Box::new((*leaf).clone());
            assert!(!leaf_clone.should_split(), "bad leaf: should split");

            let ret = leaf_clone.insert(key.clone(), value.clone());

            let rhs_ptr_opt = if leaf_clone.should_split() {
                Some(leaf_clone.split())
            } else {
                None
            };

            let install_attempt = leaf.cas(leaf_clone, &mut guard);

            if install_attempt.is_ok() {
                if ret.is_none() {
                    self.len.fetch_add(1, Ordering::Relaxed);
                }
                return ret;
            } else if let Some(new_ptr) = rhs_ptr_opt {
                // the split rhs was never published, so it can be freed directly
                let reclaimed_ptr: Box<AtomicPtr<Node<K, V, FANOUT>>> =
                    unsafe { Box::from_raw(new_ptr.0 as *mut _) };

                let _dropping_reclaimed_rhs: Box<Node<K, V, FANOUT>> =
                    unsafe { Box::from_raw(reclaimed_ptr.load(Ordering::Acquire)) };
            }
        }
    }

    pub fn remove<Q>(&self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        loop {
            let mut guard = self.ebr.pin();
            let leaf = self.inner.leaf_for_key(LeafSearch::Eq(key), &mut guard);
            let mut leaf_clone: Box<Node<K, V, FANOUT>> = Box::new((*leaf).clone());
            let ret = leaf_clone.remove(key);
            let install_attempt = leaf.cas(leaf_clone, &mut guard);
            if install_attempt.is_ok() {
                if ret.is_some() {
                    self.len.fetch_sub(1, Ordering::Relaxed);
                }
                return ret;
            }
        }
    }

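    /// Atomically compare-and-swaps the value for `key` from `old` to
    /// `new`. `None` stands for "absent" on either side, so this one
    /// method can conditionally insert, update, or remove (illustrative
    /// sketch):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
    ///
    /// // conditional insert: expect absent
    /// assert_eq!(map.cas(1, None::<&usize>, Some(10)), Ok(None));
    ///
    /// // a mismatched expectation fails and reports the actual value
    /// assert!(map.cas(1, Some(&999), Some(20)).is_err());
    ///
    /// // conditional update, then conditional removal
    /// assert_eq!(map.cas(1, Some(&10), Some(20)), Ok(Some(10)));
    /// assert_eq!(map.cas(1, Some(&20), None), Ok(Some(20)));
    /// ```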
    pub fn cas<VRef>(
        &self,
        key: K,
        old: Option<&VRef>,
        new: Option<V>,
    ) -> Result<Option<V>, CasFailure<V>>
    where
        V: Borrow<VRef>,
        VRef: PartialEq + ?Sized,
    {
        loop {
            let mut guard = self.ebr.pin();
            let leaf = self.inner.leaf_for_key(LeafSearch::Eq(&key), &mut guard);
            let mut leaf_clone: Box<Node<K, V, FANOUT>> = Box::new((*leaf).clone());
            let ret = leaf_clone.cas(key.clone(), old, new.clone());

            let rhs_ptr_opt = if leaf_clone.should_split() {
                Some(leaf_clone.split())
            } else {
                None
            };

            let install_attempt = leaf.cas(leaf_clone, &mut guard);

            if install_attempt.is_ok() {
                if matches!(ret, Ok(Some(_))) && new.is_none() {
                    self.len.fetch_sub(1, Ordering::Relaxed);
                } else if matches!(ret, Ok(None)) && new.is_some() {
                    self.len.fetch_add(1, Ordering::Relaxed);
                }
                return ret;
            } else if let Some(new_ptr) = rhs_ptr_opt {
                // the split rhs was never published, so it can be freed directly
                let reclaimed_ptr: Box<AtomicPtr<Node<K, V, FANOUT>>> =
                    unsafe { Box::from_raw(new_ptr.0 as *mut _) };

                let _dropping_reclaimed_rhs: Box<Node<K, V, FANOUT>> =
                    unsafe { Box::from_raw(reclaimed_ptr.load(Ordering::Acquire)) };
            }
        }
    }

    /// Returns the approximate number of items in the map.
    pub fn len(&self) -> usize {
        self.len.load(Ordering::Relaxed)
    }

    /// Returns `true` if the map is (approximately) empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

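    /// Iterates over the whole map in sorted key order. The iterator
    /// is double-ended (illustrative sketch):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
    /// map.insert(1, 10);
    /// map.insert(2, 20);
    ///
    /// let mut iter = map.iter();
    /// assert_eq!(iter.next(), Some((1, 10)));
    /// assert_eq!(iter.next(), Some((2, 20)));
    /// assert_eq!(iter.next(), None);
    /// ```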
    pub fn iter(&self) -> Iter<'_, K, V, FANOUT, LOCAL_GC_BUFFER_SIZE> {
        let mut guard = self.ebr.pin();

        let current = self.inner.leaf_for_key(LeafSearch::Eq(&K::MIN), &mut guard);
        let current_back = self.inner.leaf_for_key(LeafSearch::Max, &mut guard);
        let next_index_from_back = 0;

        Iter {
            guard,
            inner: &self.inner,
            current,
            range: std::ops::RangeFull,
            next_index: 0,
            current_back,
            next_index_from_back,
            q: std::marker::PhantomData,
        }
    }

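    /// Iterates over the given key range in sorted order (illustrative
    /// sketch):
    ///
    /// ```
    /// let map: concurrent_map::ConcurrentMap<usize, usize> =
    ///     (0..10).map(|i| (i, i * 10)).collect();
    ///
    /// let keys: Vec<usize> = map.range(4..8).map(|(k, _v)| k).collect();
    /// assert_eq!(keys, vec![4, 5, 6, 7]);
    /// ```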
    pub fn range<Q, R>(&self, range: R) -> Iter<'_, K, V, FANOUT, LOCAL_GC_BUFFER_SIZE, R, Q>
    where
        R: std::ops::RangeBounds<Q>,
        K: Borrow<Q>,
        Q: ?Sized + Ord + PartialEq,
    {
        let mut guard = self.ebr.pin();

        let kmin = &K::MIN;
        let min = kmin.borrow();
        let start = match range.start_bound() {
            Bound::Unbounded => min,
            Bound::Included(k) | Bound::Excluded(k) => k,
        };

        let end = match range.end_bound() {
            Bound::Unbounded => LeafSearch::Max,
            Bound::Included(k) => LeafSearch::Eq(k),
            Bound::Excluded(k) => {
                // an excluded end equal to the minimum key would make
                // the range empty and has no leaf strictly to its left
                assert!(k != K::MIN.borrow());
                LeafSearch::Lt(k)
            }
        };

        let current = self.inner.leaf_for_key(LeafSearch::Eq(start), &mut guard);
        let current_back = self.inner.leaf_for_key(end, &mut guard);

        Iter {
            guard,
            inner: &self.inner,
            range,
            current,
            current_back,
            next_index: 0,
            next_index_from_back: 0,
            q: std::marker::PhantomData,
        }
    }

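    /// Applies `f` to the current value (or `None`) in a CAS retry
    /// loop until it installs cleanly, returning the *new* value
    /// (illustrative sketch):
    ///
    /// ```
    /// let counter = concurrent_map::ConcurrentMap::<(), u64>::default();
    ///
    /// let bumped = counter.update_and_fetch((), |current| {
    ///     Some(current.copied().unwrap_or(0) + 1)
    /// });
    /// assert_eq!(bumped, Some(1));
    /// ```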
    pub fn update_and_fetch<F>(&self, key: K, mut f: F) -> Option<V>
    where
        F: FnMut(Option<&V>) -> Option<V>,
        V: PartialEq,
    {
        let mut current_opt = self.get(&key);

        loop {
            let next = f(current_opt.as_ref());
            match self.cas(key.clone(), current_opt.as_ref(), next.clone()) {
                Ok(_) => return next,
                Err(CasFailure { actual: cur, .. }) => {
                    current_opt = cur;
                }
            }
        }
    }

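    /// Like `update_and_fetch`, but returns the *previous* value
    /// (illustrative sketch):
    ///
    /// ```
    /// let counter = concurrent_map::ConcurrentMap::<(), u64>::default();
    ///
    /// let previous = counter.fetch_and_update((), |current| {
    ///     Some(current.copied().unwrap_or(0) + 1)
    /// });
    /// assert_eq!(previous, None);
    /// assert_eq!(counter.get(&()), Some(1));
    /// ```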
    pub fn fetch_and_update<F>(&self, key: K, mut f: F) -> Option<V>
    where
        F: FnMut(Option<&V>) -> Option<V>,
        V: PartialEq,
    {
        let mut current_opt = self.get(&key);

        loop {
            let next = f(current_opt.as_ref());
            match self.cas(key.clone(), current_opt.as_ref(), next) {
                Ok(_) => return current_opt,
                Err(CasFailure { actual: cur, .. }) => {
                    current_opt = cur;
                }
            }
        }
    }
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize>
    ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync + Ord,
{
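    /// Stores the smaller of the proposed value and the current value,
    /// returning the previous value (illustrative sketch):
    ///
    /// ```
    /// let map = concurrent_map::ConcurrentMap::<usize, usize>::default();
    /// map.insert(1, 10);
    ///
    /// assert_eq!(map.fetch_min(1, 5), Some(10));
    /// assert_eq!(map.get(&1), Some(5));
    ///
    /// // absent keys are simply set
    /// assert_eq!(map.fetch_min(2, 7), None);
    /// assert_eq!(map.get(&2), Some(7));
    /// ```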
    pub fn fetch_min(&self, key: K, value: V) -> Option<V> {
        let f = move |prev_opt: Option<&V>| {
            if let Some(prev) = prev_opt {
                Some(prev.min(&value).clone())
            } else {
                Some(value.clone())
            }
        };

        self.fetch_and_update(key, f)
    }

    /// Stores the larger of the proposed value and the current value,
    /// returning the previous value.
    pub fn fetch_max(&self, key: K, value: V) -> Option<V> {
        let f = move |prev_opt: Option<&V>| {
            if let Some(prev) = prev_opt {
                Some(prev.max(&value).clone())
            } else {
                Some(value.clone())
            }
        };

        self.fetch_and_update(key, f)
    }
}

/// A double-ended iterator over a [`ConcurrentMap`], or a range of one.
pub struct Iter<
    'a,
    K,
    V,
    const FANOUT: usize,
    const LOCAL_GC_BUFFER_SIZE: usize,
    R = std::ops::RangeFull,
    Q = K,
> where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
    R: std::ops::RangeBounds<Q>,
    K: Borrow<Q>,
    Q: ?Sized,
{
    inner: &'a Inner<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>,
    guard: Guard<'a, Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    range: R,
    current: NodeView<K, V, FANOUT>,
    next_index: usize,
    current_back: NodeView<K, V, FANOUT>,
    next_index_from_back: usize,
    q: std::marker::PhantomData<&'a Q>,
}

impl<'a, K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize, R, Q> Iterator
    for Iter<'a, K, V, FANOUT, LOCAL_GC_BUFFER_SIZE, R, Q>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
    R: std::ops::RangeBounds<Q>,
    K: Borrow<Q>,
    Q: ?Sized + PartialEq + Ord,
{
    type Item = (K, V);

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if let Some((k, v)) = self.current.leaf().get_index(self.next_index) {
                self.next_index += 1;
                if !self.range.contains(k.borrow()) {
                    continue;
                }
                return Some((k.clone(), v.clone()));
            } else if let Some(next_ptr) = self.current.next {
                // the current leaf is exhausted; stop if its upper
                // bound already falls outside of the range
                if !self
                    .range
                    .contains(self.current.hi.as_ref().unwrap().borrow())
                {
                    return None;
                }
                if let Some(next_current) = next_ptr.node_view(&mut self.guard) {
                    // skip any items in the next node that sit below
                    // the current node's hi key
                    self.next_index = next_current
                        .leaf()
                        .iter()
                        .position(|(k, _v)| k >= self.current.hi.as_ref().unwrap())
                        .unwrap_or(0);

                    self.current = next_current;
                } else if let Some(ref hi) = self.current.hi {
                    // the next node was freed under us; re-traverse
                    // from the root to find the leaf that now covers hi
                    let next_current = self
                        .inner
                        .leaf_for_key(LeafSearch::Eq(hi.borrow()), &mut self.guard);

                    self.next_index = next_current
                        .leaf()
                        .iter()
                        .position(|(k, _v)| k >= hi)
                        .unwrap_or(0);
                    self.current = next_current;
                } else {
                    panic!("somehow hit a node that has a next but not a hi key");
                }
            } else {
                return None;
            }
        }
    }
}
1795
1796impl<'a, K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize, R, Q> DoubleEndedIterator
1797 for Iter<'a, K, V, FANOUT, LOCAL_GC_BUFFER_SIZE, R, Q>
1798where
1799 K: 'static + Clone + Minimum + Ord + Send + Sync,
1800 V: 'static + Clone + Send + Sync,
1801 R: std::ops::RangeBounds<Q>,
1802 K: Borrow<Q>,
1803 Q: ?Sized + PartialEq + Ord,
1804{
1805 fn next_back(&mut self) -> Option<Self::Item> {
1806 loop {
1807 if self.next_index_from_back >= self.current_back.leaf().len() {
1808 if !self.range.contains(self.current_back.lo.borrow())
1809 || self.current_back.lo == K::MIN
1810 {
1811 return None;
1813 }
1814
1815 let next_current_back = self.inner.leaf_for_key(
1816 LeafSearch::Lt(self.current_back.lo.borrow()),
1817 &mut self.guard,
1818 );
1819 assert!(next_current_back.lo != self.current_back.lo);
1820
1821 self.next_index_from_back = next_current_back
1822 .leaf()
1823 .iter()
1824 .rev()
1825 .position(|(k, _v)| k < &self.current_back.lo)
1826 .unwrap_or(0);
1827
1828 self.current_back = next_current_back;
1829
1830 if self.current_back.leaf().is_empty() {
1831 continue;
1832 }
1833 }
1834
1835 let offset_to_return = self.current_back.leaf().len() - (1 + self.next_index_from_back);
1836 let (k, v) = self
1837 .current_back
1838 .leaf()
1839 .get_index(offset_to_return)
1840 .unwrap();
1841
1842 self.next_index_from_back += 1;
1843 if !self.range.contains(k.borrow()) {
1844 continue;
1845 } else {
1846 return Some((k.clone(), v.clone()));
1847 }
1848 }
1849 }
1850}

enum LeafSearch<K> {
    /// The leaf that could contain this key.
    Eq(K),
    /// The leaf that contains the greatest keys strictly below this key.
    Lt(K),
    /// The rightmost leaf.
    Max,
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize>
    Inner<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    fn root(
        &self,
        _guard: &mut Guard<'_, Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    ) -> NodeView<K, V, FANOUT> {
        loop {
            if let Some(ptr) = NonNull::new(self.root.load(Ordering::Acquire)) {
                return NodeView { ptr, id: self.root };
            }
        }
    }

    /// Marks `child` for merging by installing a new version of
    /// `parent` with `merging_child` set. Fails if the child is the
    /// leftmost child, is no longer present in the parent, or if
    /// another merge is already in flight.
    fn install_parent_merge<'a>(
        &'a self,
        parent: &NodeView<K, V, FANOUT>,
        child: &NodeView<K, V, FANOUT>,
        guard: &mut Guard<'a, Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    ) -> Result<NodeView<K, V, FANOUT>, ()> {
        // a leftmost child has no left sibling to merge into
        let is_leftmost_child = parent.index().get_index(0).unwrap().0 == child.lo;

        if is_leftmost_child {
            return Err(());
        }

        if !parent.index().contains_key(&child.lo) {
            return Err(());
        }

        if parent.merging_child.is_some() {
            return Err(());
        }

        let mut parent_clone: Box<Node<K, V, FANOUT>> = Box::new((*parent).clone());
        parent_clone.merging_child = Some(child.id);
        parent.cas(parent_clone, guard).map_err(|_| ())
    }

    fn merge_child<'a>(
        &'a self,
        parent: &mut NodeView<K, V, FANOUT>,
        child: &mut NodeView<K, V, FANOUT>,
        guard: &mut Guard<'a, Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    ) {
        // 1. mark the child as merging
        while !child.is_merging {
            let mut child_clone: Box<Node<K, V, FANOUT>> = Box::new((*child).clone());
            child_clone.is_merging = true;
            *child = match child.cas(child_clone, guard) {
                Ok(new_child) | Err(Some(new_child)) => new_child,
                Err(None) => {
                    // the child was already uninstalled by a cooperating thread
                    return;
                }
            };
        }

        // 2. find the left sibling and merge the child's items into it
        let first_left_sibling_guess = parent
            .index()
            .iter()
            .filter(|(k, _v)| (..&child.lo).contains(&k))
            .next_back()
            .unwrap()
            .1;

        let mut left_sibling = if let Some(view) = first_left_sibling_guess.node_view(guard) {
            view
        } else {
            return;
        };

        loop {
            if left_sibling.next.is_none() {
                // nothing remains to our right; another thread must
                // have already completed this merge
                return;
            }

            if child.hi.is_some() && left_sibling.hi.is_some() && left_sibling.hi >= child.hi {
                break;
            }

            let next = left_sibling.next.unwrap();
            if next != child.id {
                left_sibling = if let Some(view) = next.node_view(guard) {
                    view
                } else {
                    return;
                };
                continue;
            }

            let mut left_sibling_clone: Box<Node<K, V, FANOUT>> =
                Box::new((*left_sibling).clone());
            left_sibling_clone.merge(child);

            let rhs_ptr_opt = if left_sibling_clone.should_split() {
                Some(left_sibling_clone.split())
            } else {
                None
            };

            let cas_result = left_sibling.cas(left_sibling_clone, guard);
            if let (Err(_), Some(rhs_ptr)) = (&cas_result, rhs_ptr_opt) {
                // the split rhs was never published, so it can be freed directly
                let reclaimed_ptr: Box<AtomicPtr<Node<K, V, FANOUT>>> =
                    unsafe { Box::from_raw(rhs_ptr.0 as *mut _) };

                let _dropping_reclaimed_rhs: Box<Node<K, V, FANOUT>> =
                    unsafe { Box::from_raw(reclaimed_ptr.load(Ordering::Acquire)) };
            }

            match cas_result {
                Ok(_) => {
                    break;
                }
                Err(Some(actual)) => left_sibling = actual,
                Err(None) => {
                    return;
                }
            }
        }

        // 3. remove the merged child from the parent and clear the
        //    parent's merging_child marker
        while parent.merging_child == Some(child.id) {
            let mut parent_clone: Box<Node<K, V, FANOUT>> = Box::new((*parent).clone());

            assert!(parent_clone.merging_child.is_some());
            assert!(parent_clone.index().contains_key(&child.lo));

            parent_clone.merging_child = None;
            parent_clone.index_mut().remove(&child.lo).unwrap();

            let cas_result = parent.cas(parent_clone, guard);
            match cas_result {
                Ok(new_parent) | Err(Some(new_parent)) => *parent = new_parent,
                Err(None) => {
                    return;
                }
            }
        }

        // 4. uninstall the child's pointer and defer its reclamation
        if child
            .id
            .compare_exchange(
                child.ptr.as_ptr(),
                std::ptr::null_mut(),
                Ordering::AcqRel,
                Ordering::Acquire,
            )
            .is_err()
        {
            // another thread already uninstalled the child
            return;
        }

        guard.defer_drop(Deferred::BoxedAtomicPtr(child.id));

        let replaced: Box<Node<K, V, FANOUT>> = unsafe { Box::from_raw(child.ptr.as_ptr()) };
        guard.defer_drop(Deferred::Node(replaced));
    }

    #[cfg(feature = "timing")]
    fn print_timing(&self) {
        println!(
            "min : {:?}",
            Duration::from_nanos(self.fastest_op.load(Ordering::Acquire))
        );
        println!(
            "max : {:?}",
            Duration::from_nanos(self.slowest_op.load(Ordering::Acquire))
        );
    }

    #[cfg(feature = "timing")]
    fn record_timing(&self, time: Duration) {
        let nanos = time.as_nanos() as u64;
        let min = self.fastest_op.load(Ordering::Relaxed);
        if nanos < min {
            self.fastest_op.fetch_min(nanos, Ordering::Relaxed);
        }

        let max = self.slowest_op.load(Ordering::Relaxed);
        if nanos > max {
            self.slowest_op.fetch_max(nanos, Ordering::Relaxed);
        }
    }

    /// Descends from the root to the leaf that matches `search`,
    /// cooperatively completing any splits or merges encountered
    /// along the way.
    fn leaf_for_key<'a, Q>(
        &'a self,
        search: LeafSearch<&Q>,
        guard: &mut Guard<'a, Deferred<K, V, FANOUT>, LOCAL_GC_BUFFER_SIZE>,
    ) -> NodeView<K, V, FANOUT>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let mut parent_cursor_opt: Option<NodeView<K, V, FANOUT>> = None;
        let mut cursor = self.root(guard);
        let mut root_cursor = NodeView {
            ptr: cursor.ptr,
            id: cursor.id,
        };

        macro_rules! reset {
            ($reason:expr) => {
                parent_cursor_opt = None;
                cursor = self.root(guard);
                root_cursor = NodeView {
                    ptr: cursor.ptr,
                    id: cursor.id,
                };
                continue;
            };
        }

        #[cfg(feature = "timing")]
        let before = Instant::now();

        loop {
            // cooperatively complete any in-flight merge before proceeding
            if let Some(merging_child_ptr) = cursor.merging_child {
                let mut child = if let Some(view) = merging_child_ptr.node_view(guard) {
                    view
                } else {
                    reset!("merging child of marked parent already freed");
                };
                self.merge_child(&mut cursor, &mut child, guard);
                reset!("cooperatively performed merge_child after detecting parent");
            }

            if cursor.is_merging {
                reset!("resetting after detected child merging without corresponding parent child_merge");
            }

            if cursor.should_merge() {
                if let Some(ref mut parent_cursor) = parent_cursor_opt {
                    let is_leftmost_child =
                        parent_cursor.index().get_index(0).unwrap().0 == cursor.lo;

                    if !is_leftmost_child {
                        if let Ok(new_parent) =
                            self.install_parent_merge(parent_cursor, &cursor, guard)
                        {
                            *parent_cursor = new_parent;
                        } else {
                            reset!("failed to install parent merge");
                        }

                        self.merge_child(parent_cursor, &mut cursor, guard);
                        reset!("completed merge_child");
                    }
                } else {
                    // the root is allowed to remain underfull
                    assert!(!cursor.is_leaf());
                }
            }

            match search {
                LeafSearch::Eq(k) | LeafSearch::Lt(k) => assert!(k >= cursor.lo.borrow()),
                LeafSearch::Max => {}
            }

            if let Some(hi) = &cursor.hi {
                let go_right = match search {
                    LeafSearch::Eq(k) => k >= hi.borrow(),
                    LeafSearch::Lt(k) => k > hi.borrow(),
                    LeafSearch::Max => true,
                };
                if go_right {
                    // go right to the tree sibling
                    let next = cursor.next.unwrap();
                    let rhs = if let Some(view) = next.node_view(guard) {
                        view
                    } else {
                        reset!("right child already freed");
                    };

                    if let Some(ref mut parent_cursor) = parent_cursor_opt {
                        if parent_cursor.is_viable_parent_for(&rhs) {
                            // the rhs node is not yet present in the parent
                            // index; install it to complete the split
                            let mut parent_clone: Box<Node<K, V, FANOUT>> =
                                Box::new((*parent_cursor).clone());
                            assert!(!parent_clone.is_leaf());
                            parent_clone.index_mut().insert(rhs.lo.clone(), next);

                            let rhs_ptr_opt = if parent_clone.should_split() {
                                Some(parent_clone.split())
                            } else {
                                None
                            };

                            if let Ok(new_parent_view) = parent_cursor.cas(parent_clone, guard) {
                                parent_cursor_opt = Some(new_parent_view);
                            } else if let Some(rhs_ptr) = rhs_ptr_opt {
                                let reclaimed_ptr: Box<AtomicPtr<Node<K, V, FANOUT>>> =
                                    unsafe { Box::from_raw(rhs_ptr.0 as *mut _) };

                                let _dropping_reclaimed_rhs: Box<Node<K, V, FANOUT>> =
                                    unsafe { Box::from_raw(reclaimed_ptr.load(Ordering::Acquire)) };
                            }
                        }
                    } else {
                        // the root split and the new rhs has no parent;
                        // grow the tree by installing a new root that
                        // points to both halves
                        let current_root_ptr: AtomicPtr<_> = root_cursor.ptr.as_ptr().into();
                        let new_index_ptr =
                            BoxedAtomicPtr(Box::into_raw(Box::new(current_root_ptr)));

                        let mut new_root_node = Node::<K, V, FANOUT>::new_root();
                        new_root_node
                            .index_mut()
                            .insert(cursor.lo.clone(), new_index_ptr);
                        new_root_node.index_mut().insert(rhs.lo.clone(), next);
                        let new_root_ptr = Box::into_raw(new_root_node);

                        let worked = !debug_delay()
                            && self
                                .root
                                .compare_exchange(
                                    root_cursor.ptr.as_ptr(),
                                    new_root_ptr,
                                    Ordering::AcqRel,
                                    Ordering::Acquire,
                                )
                                .is_ok();

                        if worked {
                            let parent_view = NodeView {
                                id: self.root,
                                ptr: NonNull::new(new_root_ptr).unwrap(),
                            };
                            parent_cursor_opt = Some(parent_view);
                        } else {
                            let dangling_root = unsafe { Box::from_raw(new_root_ptr) };
                            drop(dangling_root);

                            let reclaimed_ptr: Box<AtomicPtr<Node<K, V, FANOUT>>> =
                                unsafe { Box::from_raw(new_index_ptr.0 as *mut _) };
                            drop(reclaimed_ptr);
                        }
                    }

                    cursor = rhs;
                    continue;
                }
            }

            if cursor.is_leaf() {
                assert!(!cursor.is_merging);
                assert!(cursor.merging_child.is_none());
                if let Some(ref hi) = cursor.hi {
                    match search {
                        LeafSearch::Eq(k) => assert!(k < hi.borrow()),
                        LeafSearch::Lt(k) => assert!(k <= hi.borrow()),
                        LeafSearch::Max => {
                            unreachable!("leaf should have no hi key if we're searching for Max")
                        }
                    }
                }
                break;
            }

            // descend to the child that covers the searched-for key
            let index = cursor.index();
            let child_ptr = match search {
                LeafSearch::Eq(k) => index.get_less_than_or_equal(k).unwrap().1,
                LeafSearch::Lt(k) => index.get_less_than(k).unwrap().1,
                LeafSearch::Max => {
                    index
                        .get_index(index.len().checked_sub(1).unwrap())
                        .unwrap()
                        .1
                }
            };

            parent_cursor_opt = Some(cursor);
            cursor = if let Some(view) = child_ptr.node_view(guard) {
                view
            } else {
                reset!("attempt to traverse to child failed because the child has been freed");
            };
        }

        #[cfg(feature = "timing")]
        self.record_timing(before.elapsed());

        cursor
    }
}

#[derive(Debug, Clone)]
#[repr(u8)]
enum Data<K, V, const FANOUT: usize>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    Leaf(StackMap<K, V, FANOUT>),
    Index(StackMap<K, BoxedAtomicPtr<K, V, FANOUT>, FANOUT>),
}

impl<K, V, const FANOUT: usize> Data<K, V, FANOUT>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    const fn len(&self) -> usize {
        match self {
            Data::Leaf(ref leaf) => leaf.len(),
            Data::Index(ref index) => index.len(),
        }
    }
}

#[derive(Debug)]
struct Node<K, V, const FANOUT: usize>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    // the node directly to the right, if any
    next: Option<BoxedAtomicPtr<K, V, FANOUT>>,
    // set on a parent while one of its children is being merged away
    merging_child: Option<BoxedAtomicPtr<K, V, FANOUT>>,
    data: Data<K, V, FANOUT>,
    // the node's inclusive lower key bound
    lo: K,
    // the node's exclusive upper key bound; None on the rightmost node
    hi: Option<K>,
    // set while this node is being merged into its left sibling
    is_merging: bool,
}

impl<K, V, const FANOUT: usize> Clone for Node<K, V, FANOUT>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    fn clone(&self) -> Node<K, V, FANOUT> {
        Node {
            lo: self.lo.clone(),
            hi: self.hi.clone(),
            next: self.next,
            data: self.data.clone(),
            merging_child: self.merging_child,
            is_merging: self.is_merging,
        }
    }
}

impl<K, V, const FANOUT: usize> Node<K, V, FANOUT>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    const fn index(&self) -> &StackMap<K, BoxedAtomicPtr<K, V, FANOUT>, FANOUT> {
        if let Data::Index(ref index) = self.data {
            index
        } else {
            unreachable!()
        }
    }

    fn index_mut(&mut self) -> &mut StackMap<K, BoxedAtomicPtr<K, V, FANOUT>, FANOUT> {
        if let Data::Index(ref mut index) = self.data {
            index
        } else {
            unreachable!()
        }
    }

    const fn leaf(&self) -> &StackMap<K, V, FANOUT> {
        if let Data::Leaf(ref leaf) = self.data {
            leaf
        } else {
            unreachable!()
        }
    }

    fn leaf_mut(&mut self) -> &mut StackMap<K, V, FANOUT> {
        if let Data::Leaf(ref mut leaf) = self.data {
            leaf
        } else {
            unreachable!()
        }
    }

    const fn is_leaf(&self) -> bool {
        matches!(self.data, Data::Leaf(..))
    }

    fn new_root() -> Box<Node<K, V, FANOUT>> {
        let min_key = K::MIN;
        Box::new(Node {
            lo: min_key,
            hi: None,
            next: None,
            data: Data::Index(StackMap::new()),
            merging_child: None,
            is_merging: false,
        })
    }

    fn new_leaf(lo: K) -> Box<Node<K, V, FANOUT>> {
        Box::new(Node {
            lo,
            hi: None,
            next: None,
            data: Data::Leaf(StackMap::new()),
            merging_child: None,
            is_merging: false,
        })
    }

    fn get<Q>(&self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        assert!(!self.is_merging);
        assert!(self.merging_child.is_none());
        assert!(self.is_leaf());

        self.leaf().get(key).cloned()
    }

    fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        assert!(!self.is_merging);
        assert!(self.merging_child.is_none());

        self.leaf_mut().remove(key)
    }

    fn insert(&mut self, key: K, value: V) -> Option<V> {
        assert!(!self.is_merging);
        assert!(self.merging_child.is_none());
        assert!(!self.should_split());

        self.leaf_mut().insert(key, value)
    }

    fn cas<V2>(
        &mut self,
        key: K,
        old: Option<&V2>,
        new: Option<V>,
    ) -> Result<Option<V>, CasFailure<V>>
    where
        V: Borrow<V2>,
        V2: ?Sized + PartialEq,
    {
        assert!(!self.is_merging);
        assert!(self.merging_child.is_none());

        assert!(!self.should_split());

        match (old, self.leaf().get(&key)) {
            (expected, actual) if expected == actual.map(Borrow::borrow) => {
                if let Some(to_insert) = new {
                    Ok(self.leaf_mut().insert(key, to_insert))
                } else {
                    Ok(self.leaf_mut().remove(&key))
                }
            }
            (_, actual) => Err(CasFailure {
                actual: actual.cloned(),
                returned_new_value: new,
            }),
        }
    }

    const fn should_merge(&self) -> bool {
        if self.merging_child.is_some() || self.is_merging {
            return false;
        }
        self.len() <= MERGE_SIZE
    }

    const fn should_split(&self) -> bool {
        if self.merging_child.is_some() || self.is_merging {
            return false;
        }
        // split when fewer than MERGE_SIZE free slots remain
        self.len() > FANOUT - MERGE_SIZE
    }

    const fn len(&self) -> usize {
        self.data.len()
    }

    fn split(&mut self) -> BoxedAtomicPtr<K, V, FANOUT> {
        assert!(!self.is_merging);
        assert!(self.merging_child.is_none());

        let split_idx = if self.hi.is_none() {
            // the rightmost node is optimized for appends: keep
            // almost everything in the left half
            self.len() - 2
        } else if self.lo == K::MIN {
            // the leftmost node is optimized for prepends: keep
            // almost everything in the right half
            2
        } else {
            FANOUT / 2
        };

        let (split_point, rhs_data) = match self.data {
            Data::Leaf(ref mut leaf) => {
                let rhs_leaf = leaf.split_off(split_idx);
                let split_point = rhs_leaf.first().unwrap().0.clone();
                assert!(leaf.len() > MERGE_SIZE);
                (split_point, Data::Leaf(rhs_leaf))
            }
            Data::Index(ref mut index) => {
                let rhs_index = index.split_off(split_idx);
                let split_point = rhs_index.first().unwrap().0.clone();
                assert!(index.len() > MERGE_SIZE);
                (split_point, Data::Index(rhs_index))
            }
        };

        assert!(rhs_data.len() < FANOUT - MERGE_SIZE);
        assert!(rhs_data.len() > MERGE_SIZE);

        let rhs_hi = std::mem::replace(&mut self.hi, Some(split_point.clone()));

        let rhs = BoxedAtomicPtr::new(Box::new(Node {
            lo: split_point,
            hi: rhs_hi,
            next: self.next,
            data: rhs_data,
            merging_child: None,
            is_merging: false,
        }));

        self.next = Some(rhs);

        assert!(!self.should_split());

        rhs
    }

    fn merge(&mut self, rhs: &NodeView<K, V, FANOUT>) {
        assert!(rhs.is_merging);
        assert!(!self.is_merging);

        self.hi = rhs.hi.clone();
        self.next = rhs.next;

        match self.data {
            Data::Leaf(ref mut leaf) => {
                for (k, v) in rhs.leaf().iter() {
                    let prev = leaf.insert(k.clone(), v.clone());
                    assert!(prev.is_none());
                }
            }
            Data::Index(ref mut index) => {
                for (k, v) in rhs.index().iter() {
                    let prev = index.insert(k.clone(), *v);
                    assert!(prev.is_none());
                }
            }
        }
    }

    fn is_viable_parent_for(&self, possible_child: &NodeView<K, V, FANOUT>) -> bool {
        match (&self.hi, &possible_child.hi) {
            (Some(_), None) => return false,
            (Some(parent_hi), Some(child_hi)) if parent_hi < child_hi => return false,
            _ => {}
        }
        self.lo <= possible_child.lo
    }
}

impl<K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize> FromIterator<(K, V)>
    for ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    fn from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self {
        let map = ConcurrentMap::default();

        for (k, v) in iter {
            map.insert(k, v);
        }

        map
    }
}

impl<'a, K, V, const FANOUT: usize, const LOCAL_GC_BUFFER_SIZE: usize> IntoIterator
    for &'a ConcurrentMap<K, V, FANOUT, LOCAL_GC_BUFFER_SIZE>
where
    K: 'static + Clone + Minimum + Ord + Send + Sync,
    V: 'static + Clone + Send + Sync,
{
    type Item = (K, V);
    type IntoIter = Iter<'a, K, V, FANOUT, LOCAL_GC_BUFFER_SIZE, std::ops::RangeFull>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

const fn _test_impls() {
    const fn send<T: Send>() {}
    const fn clone<T: Clone>() {}
    send::<ConcurrentMap<usize, usize>>();
    clone::<ConcurrentMap<usize, usize>>();
}

#[test]
fn basic_map() {
    let map = ConcurrentMap::<usize, usize>::default();

    let n = 64;
    for i in 0..=n {
        assert_eq!(map.get(&i), None);
        map.insert(i, i);
        assert_eq!(map.get(&i), Some(i), "failed to get key {i}");
    }

    for (i, (k, _v)) in map.range(..).enumerate() {
        assert_eq!(i, k);
    }

    for (i, (k, _v)) in map.range(..).rev().enumerate() {
        assert_eq!(n - i, k);
    }

    for (i, (k, _v)) in map.iter().enumerate() {
        assert_eq!(i, k);
    }

    for (i, (k, _v)) in map.iter().rev().enumerate() {
        assert_eq!(n - i, k);
    }

    for (i, (k, _v)) in map.range(0..).enumerate() {
        assert_eq!(i, k);
    }

    for (i, (k, _v)) in map.range(0..).rev().enumerate() {
        assert_eq!(n - i, k);
    }

    for (i, (k, _v)) in map.range(0..n).enumerate() {
        assert_eq!(i, k);
    }

    for (i, (k, _v)) in map.range(0..n).rev().enumerate() {
        assert_eq!((n - 1) - i, k);
    }

    for (i, (k, _v)) in map.range(0..=n).enumerate() {
        assert_eq!(i, k);
    }

    for (i, (k, _v)) in map.range(0..=n).rev().enumerate() {
        assert_eq!(n - i, k);
    }

    for i in 0..=n {
        assert_eq!(map.get(&i), Some(i), "failed to get key {i}");
    }
}

#[test]
fn timing_map() {
    use std::time::Instant;

    let map = ConcurrentMap::<u64, u64>::default();

    let n = 1024 * 1024;

    let insert = Instant::now();
    for i in 0..n {
        map.insert(i, i);
    }
    let insert_elapsed = insert.elapsed();
    println!(
        "{} inserts/s, total {:?}",
        (n * 1_000_000) / u64::try_from(insert_elapsed.as_micros().max(1)).unwrap_or(u64::MAX),
        insert_elapsed
    );

    let scan = Instant::now();
    let count = map.range(..).count();
    assert_eq!(count as u64, n);
    let scan_elapsed = scan.elapsed();
    println!(
        "{} scanned items/s, total {:?}",
        (n * 1_000_000) / u64::try_from(scan_elapsed.as_micros().max(1)).unwrap_or(u64::MAX),
        scan_elapsed
    );

    let scan_rev = Instant::now();
    let count = map.range(..).rev().count();
    assert_eq!(count as u64, n);
    let scan_rev_elapsed = scan_rev.elapsed();
    println!(
        "{} reverse-scanned items/s, total {:?}",
        (n * 1_000_000) / u64::try_from(scan_rev_elapsed.as_micros().max(1)).unwrap_or(u64::MAX),
        scan_rev_elapsed
    );

    let gets = Instant::now();
    for i in 0..n {
        map.get(&i);
    }
    let gets_elapsed = gets.elapsed();
    println!(
        "{} gets/s, total {:?}",
        (n * 1_000_000) / u64::try_from(gets_elapsed.as_micros().max(1)).unwrap_or(u64::MAX),
        gets_elapsed
    );
}