// skippy_rs/internal/sync/mod.rs

1use core::borrow::Borrow;
2use core::fmt::Debug;
3use core::marker::Sync;
4use core::ptr::NonNull;
5use core::sync::atomic::Ordering;
6
7use haphazard::{
8    raw::Pointer,
9    Global, 
10    HazardPointer, 
11    Domain
12};
13
14use crate::internal::utils::{
15    skiplist_basics, 
16    GeneratesHeight, 
17    Node, 
18    HEIGHT
19};
20
21pub(crate) mod tagged;
22pub mod iter;
23pub use iter::{ Iter, IntoIter };
24
// Generates the `SkipList` struct (head, state, garbage fields and the basic
// constructors/accessors) via the shared macro from `crate::internal::utils`.
skiplist_basics!(SkipList);
26
27impl<'a, K, V> Debug for SkipList<'a, K, V> {
28    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
29        f.debug_struct("SkipList").field("head", &self.head.as_ptr()).finish()
30    }
31}
32
33impl<'domain, K, V> SkipList<'domain, K, V>
34where
35    K: Ord + Send + Sync,
36    V: Send + Sync,
37{
38    /// Inserts a value in the list given a key.
    /// Inserts a value in the list given a key.
    ///
    /// If a node with an equal key is already present it is (logically) removed,
    /// unlinked, and returned as `Some(Entry)`; otherwise `None` is returned.
    /// The new node is always inserted.
    pub fn insert<'a>(&'a self, key: K, val: V) -> Option<Entry<'a, K, V>> {
        // After this check, whether we are holding the head or a regular Node will
        // not impact the operation.
        let mut insertion_point = self.find(&key, false);
        let mut existing = None;

        // Displace any existing node with this key: mark it removed and tag its
        // levels, physically unlink it, then re-search. `take()` guarantees the
        // loop terminates once no target remains.
        while let Some(target) = insertion_point.target.take() {
            if target.try_remove_and_tag().is_ok() {
                unsafe {
                    // Best-effort unlink; the `find` below repairs anything missed.
                    let _ = self.unlink(&target, target.height(), &insertion_point.prev);
                }
                insertion_point = self.find(&key, false);
                existing = Some(target);
            }
        };

        let mut prev = insertion_point.prev;

        let new_node_raw = Node::new_rand_height(key, val, self);

        // Protects the new_node so concurrent removals do not invalidate our pointer.
        let new_node = NodeRef::from_raw(new_node_raw);

        let mut starting_height = 0;

        // The node should not be in build stage!
        // assert!(new_node.set_build_begin().is_ok());
        //

        self.state.len.fetch_add(1, Ordering::AcqRel);

        unsafe {
            // Link level by level; on a CAS failure `link_nodes` reports the
            // level it reached and we re-search before retrying from there.
            while let Err(starting) =
                self.link_nodes(&new_node, prev, starting_height)
            {
                let mut search = self.find(&new_node.key, false);

                // Again displace any equal-keyed node that raced in ahead of us —
                // unless it is our own node, which means we are already linked.
                while let Some(target) = search.target.take() {
                    if core::ptr::eq(target.as_ptr(), new_node.as_ptr()) {
                        break;
                    }

                    if target.try_remove_and_tag().is_ok() {
                        let _ = self.unlink(&target, target.height(), &search.prev);
                        search = self.find(&new_node.key, false);
                        existing = Some(target);
                    }
                };

                (starting_height, prev) = (starting, search.prev);
            }
        }

        // Hand the displaced node (if any) back to the caller as an `Entry`.
        existing.map(|existing| existing.into())
    }
94
    /// This function is unsafe, as it does not check whether new_node or link node are valid
    /// pointers.
    ///
    /// Links `new_node` into each level of `[start_height, new_node.height())`
    /// using the `(prev, next)` pairs recorded by a prior search. Returns
    /// `Err(level)` when a CAS fails and the caller must re-search from there.
    ///
    /// # Safety
    ///
    /// 1. `new_node` cannot be null
    /// 2. A tower of sufficient height must eventually be reached, the list head can be this tower
    unsafe fn link_nodes<'a>(
        &self,
        new_node: &'a NodeRef<'a, K, V>,
        previous_nodes: [(NodeRef<'a, K, V>, Option<NodeRef<'a, K, V>>); HEIGHT],
        start_height: usize,
    ) -> Result<(), usize> {
        // iterate over all the levels in the new nodes pointer tower
        for i in start_height..new_node.height() {
            let (prev, next) = &previous_nodes[i];
            let next_ptr = next.as_ref().map_or(core::ptr::null_mut(), |n| n.as_ptr());

            let curr_next = new_node.levels[i].load_ptr();

            // A concurrent remover marked us; stop building higher levels.
            if new_node.removed() {
                break;
            }

            // We check if the next node is actually lower in key than our current node.
            // If the key is not greater we stop building our node.
            if next.as_ref()
                .and_then(|n| if n.key <= new_node.key && !new_node.removed() {
                    Some(())
                } else {
                    None
                }).is_some()
            {
                break;
            }
            
            // Swap the previous' next node into the new_node's level
            // It could be the case that we link ourselves to the previous node, but just as we do
            // this `next` attempts to unlink itself and fails. So while we succeeded, `next`
            // repeats its search and finds that we are the next
            if new_node.levels[i].compare_exchange(curr_next, next_ptr).is_err() {
                return Err(i);
            };

            // If this is the base level, we simply increment the ref count, as we expect it to be
            // 0. If it is not, we only increment if it > 0.
            if i == 0 {
                new_node.add_ref();
            } else if new_node.try_add_ref().is_err() {
                break;
            }

            // Swap the new_node into the previous' level. If the previous' level has changed since
            // the search, we repeat the search from this level.
            if let Err((_other, _tag)) = prev.levels[i].compare_exchange(
                next_ptr, 
                new_node.as_ptr()
            ) {
                new_node.sub_ref();
                return Err(i);
            }

        }

        // IF we linked the node, yet it was removed during that process, there may be some levels
        // that we linked and that were missed by the removers. We search to unlink those too.
        if new_node.removed() {
            self.find(&new_node.key, false);
        }

        Ok(())
    }
167
    /// Removes and returns the entry for `key`, or `None` when the key is
    /// absent or another thread is already removing the same node.
    #[allow(unused_assignments)]
    pub fn remove<'a>(&'a self, key: &K) -> Option<Entry<'a, K, V>>
    where
        K: Send,
        V: Send,
    {
    match self.find(key, false) {
        SearchResult {
                target: Some(target),
                prev,
            } => {

                // Set the target state to being removed
                // If this errors, it is already being removed by someone else
                // and thus we exit early.
                if target.set_removed().is_err() {
                    return None;
                }

                // # Safety:
                // 1. `key` and `val` will not be tempered with.
                // TODO This works for now, yet once `Atomic` is used
                // this may need to change.
                let height = target.height();

                // Tag every level pointer so concurrent searches see the node
                // as being unlinked; we won the `set_removed` race, so no tag
                // may already be present.
                if let Err(_) = target.tag_levels(1) {
                    panic!("SHOULD NOT BE TAGGED!")
                };

                // #Safety:
                // 1. The height we got from the `node` guarantees it is a valid height for levels.
                unsafe {
                    if self.unlink(&target, height, &prev).is_err() {
                        // A failed unlink is repaired by the helping in `find`.
                        self.find(&key, false);
                    }
                }


                Some(target.into())
            }
            _ => None,
        }
    }
211
    /// Logically removes the node from the list by linking its adjacent nodes to one-another.
    ///
    /// Walks the recorded `(prev, next)` pairs top-down and CAS-swings each
    /// `prev` level pointer past `node`. Returns `Err(level + 1)` on the first
    /// CAS failure so the caller can re-search from that level.
    ///
    /// # Safety
    /// 1. All indices in [0, height) are valid indices for `node.levels`.
    unsafe fn unlink<'a>(
        &self,
        node: &'a NodeRef<'a, K, V>,
        height: usize,
        previous_nodes: &[(NodeRef<'a, K, V>, Option<NodeRef<'a, K, V>>); HEIGHT],
    ) -> Result<(), usize> {
        // safety check against UB caused by unlinking the head
        if self.is_head(node.as_ptr()) {
            panic!()
        }

        // # Safety
        //
        // 1.-3. Some as method and covered by method caller.
        // 4. We are not unlinking the head. - Covered by previous safety check.
        for (i, (prev, next)) in previous_nodes.iter().enumerate().take(height).rev() {
            let (new_next, _tag) = node.levels[i].load_decomposed();
            let _next_ptr = next.as_ref().map_or(core::ptr::null_mut(), |n| n.as_ptr());

            // We check if the previous node is being removed after we have already unlinked
            // from it as the prev nodes expects us to do this.
            // We still need to stop the unlink here, as we will have to relink to the actual,
            // lively previous node at this level as well.

            // Performs a compare_exchange, expecting the old value of the pointer to be the current
            // node. If it is not, we cannot make any reasonable progress, so we search again.
            if let Err((_other, _tag)) = prev.levels[i].compare_exchange(
                node.as_ptr(),
                new_next,
            ) {
                return Err(i + 1);
            }

            // `sub_ref` returns `None` once the refcount hits zero and the node
            // has been retired — stop touching it in that case.
            if self.sub_ref(&node).is_none() {
                break;
            };
        }

        self.state.len.fetch_sub(1, Ordering::AcqRel);

        // we see if we can drop some pointers in the list.
        self.garbage.domain.eager_reclaim();
        Ok(())
    }
260
261    /// Decrements the reference count of the `Node` by 1. If the reference count is thus 0, we
262    /// retire the node.
263    fn sub_ref<'a>(&self, node: &NodeRef<'a, K, V>) -> Option<()> {
264        if node.try_sub_ref().expect("to not overflow") == 0 {
265            self.retire_node(node.as_ptr());
266            None
267        } else {
268            Some(())
269        }
270    }
271
    /// Unlink [Node](Node) `curr` at the given level of [Node](Node) `prev` by exchanging the pointer for `next`.
    ///
    /// On success the unlinked node's refcount is decremented (possibly
    /// retiring it) and the new `next` is returned. `Err(())` means the CAS
    /// lost a race and the caller should re-search.
    ///
    /// # Safety
    ///
    /// 1. `prev`, `curr`, are protected accesses.
    #[allow(unused)]
    unsafe fn unlink_level<'a>(
        &'a self,
        prev: &NodeRef<'a, K, V>,
        curr: NodeRef<'a, K, V>,
        next: Option<NodeRef<'a, K, V>>,
        level: usize,
    ) -> Result<Option<NodeRef<'a, K, V>>, ()> {
        // The pointer to `next` is tagged to signal unlinking. 
        let next_ptr = next.as_ref().map_or(core::ptr::null_mut(), |n| n.as_ptr());

        if let Ok(_) = prev.levels[level].compare_exchange(curr.as_ptr(), next_ptr) {
            self.sub_ref(&curr);

            Ok(next)
        } else {
            Err(())
        }
    }
296
    /// Hands `node_ptr` to the hazard-pointer domain for deferred reclamation.
    /// The node is dropped (via `DeallocOnDrop`) once no hazard protects it.
    fn retire_node(&self, node_ptr: *mut Node<K, V>) {
        unsafe {
            self.garbage
                .domain
                .retire_ptr::<Node<K, V>, DeallocOnDrop<K, V>>(node_ptr)
        };
    }
304
305    fn find<'a>(&'a self, key: &K, search_closest: bool) -> SearchResult<'a, K, V> {
306        let head = unsafe { &(*self.head.as_ptr()) };
307
308        // Initialize the `prev` array.
309        let mut prev = unsafe {
310            let mut prev: [core::mem::MaybeUninit<(NodeRef<'a, K, V>, Option<NodeRef<'a, K, V>>)>; HEIGHT] 
311                = core::mem::MaybeUninit::uninit().assume_init();
312
313            for (i, level) in prev.iter_mut().enumerate() {
314                core::ptr::write(
315                    level.as_mut_ptr(), 
316                    (NodeRef::from_raw(self.head.cast::<Node<K, V>>().as_ptr()), NodeRef::from_maybe_tagged(&self.head.as_ref().levels[i]))
317                )
318            }
319
320            core::mem::transmute::<_, [(NodeRef<'a, K, V>, Option<NodeRef<'a, K, V>>); HEIGHT]>(prev)
321        };
322
323
324        '_search: loop {
325            let mut level = self.state.max_height.load(Ordering::Relaxed);
326            // Find the first and highest node tower
327            while level > 1 && head.levels[level - 1].load_ptr().is_null() {
328                level -= 1;
329            }
330
331            // We need not protect the head, as it will always be valid, as long as we are in a sane
332            // state.
333            let mut curr = NodeRef::from_raw(self.head.as_ptr().cast::<Node<K, V>>());
334
335            // steps:
336            // 1. Go through each level until we reach a node with a key GEQ to ours or that is null
337            //     1.1 If we are equal, then the node must either be marked as removed or removed nodes
338            //       are allowed in this search.
339            //       Should this be the case, then we drop down a level while also protecting a pointer
340            //       to the current node, in order to keep the `Level` valid in our `prev` array.
341            //     1.2 If we the `next` node is less or equal but removed and removed nodes are
342            //       disallowed, then we set our current node to the next node.
343            while level > 0 {
344                let next = unsafe {
345                    let mut next = NodeRef::from_maybe_tagged(&curr.levels[level - 1]);
346                    loop {
347                        if next.is_none() {
348                            break next;
349                        }
350
351                        if let Some(n) = next.as_ref() {
352                            if n.levels[level - 1].load_tag() == 0 {
353                                break next;
354                            }
355                        }
356
357                        let n = next.unwrap();
358
359                        let new_next = NodeRef::from_maybe_tagged(&n.levels[level - 1]);
360
361                        let Ok(n) = self.unlink_level(&curr, n, new_next, level - 1) else {
362                            continue '_search;
363                        };
364
365                        next = n
366
367                    }
368                };
369
370                match next {
371                    Some(next) 
372                        // This check should ensure that we always get a non-removed node, if there
373                        // is one, of our target key, as long as allow removed is set to false.
374                        if (*next).key < *key => {
375
376                        // If the current node is being removed, we try to help unlinking it at this level.
377                        // Update previous_nodes.
378                        prev[level - 1] = (curr, Some(next.clone()));
379
380                        curr = next;
381                    },
382                    next => {
383                        // Update previous_nodes.
384                        prev[level - 1] = (curr.clone(), next);
385
386                        level -= 1;
387                    }
388                }
389            }
390
391            unsafe {
392                return if search_closest {
393                    let mut next = NodeRef::from_maybe_tagged(&curr.levels[level - 1]);
394                    loop {
395                        if next.is_none() {
396                            break;
397                        }
398
399                        if let Some(n) = next.as_ref() {
400                            if n.levels[level - 1].load_tag() == 0 {
401                                break;
402                            }
403                        }
404
405                        let n = next.unwrap();
406
407                        let new_next = NodeRef::from_maybe_tagged(&n.levels[level - 1]);
408
409                        let Ok(n) = self.unlink_level(&curr, n, new_next, level - 1) else {
410                            continue '_search;
411                        };
412
413                        next = n
414                    }
415
416                    SearchResult { prev, target: next }
417                } else {
418                    match NodeRef::from_maybe_tagged(&prev[0].0.as_ref().levels[0]) {
419                        Some(next) if next.key == *key && !next.removed() => SearchResult { prev, target: Some(next) },
420                        _ => SearchResult { prev, target: None }
421                    }
422                }
423            }
424        }
425    }
426
427    pub fn get<'a>(&'a self, key: &K) -> Option<Entry<'a, K, V>> {
428        if self.is_empty() {
429            return None;
430        }
431
432        // Perform safety check for whether we are dealing with the head.
433        match self.find(key, false) {
434            SearchResult {
435                target: Some(target),
436                ..
437            } => Some(Entry::from(target)),
438            _ => None,
439        }
440    }
441
442    fn is_head(&self, ptr: *const Node<K, V>) -> bool {
443        std::ptr::eq(ptr, self.head.as_ptr().cast())
444    }
445
    /// Returns the entry following `node` at the bottom level, skipping (and
    /// helping to unlink) nodes that are marked for removal.
    fn next_node<'a>(&'a self, node: &Entry<'a, K, V>) -> Option<Entry<'a, K, V>> {
        // NOTE(review): relies on `Entry` and `NodeRef` having identical layout
        // (both are `NonNull<Node>` + `HazardPointer`) — confirm if either changes.
        let node: &NodeRef<'_, _, _> = unsafe { core::mem::transmute(node) };

        // This means we have a stale node and cannot return a sane answer!
        if node.levels[0].load_tag() == 1 {
            return self.find(&node.key, true).target.map(|t| t.into())
        };

        let mut next = NodeRef::from_maybe_tagged(&node.levels[0])?;
        
        // Unlink and skip all removed `Node`s we may encounter.
        while next.levels[0].load_tag() == 1 {
            let new = NodeRef::from_maybe_tagged(&next.levels[0]);
            next = unsafe {
                // A failed unlink means our snapshot is stale; fall back to a
                // fresh closest-match search from this node's key.
                self.unlink_level(&node, next, new, 0)
                    .ok()
                    .unwrap_or_else(|| self.find(&node.key, true).target)?
            };
        }

        Some(next.into())
    }
468
469    pub fn get_first<'a>(&'a self) -> Option<Entry<'a, K, V>> {
470        if self.is_empty() {
471            return None;
472        }
473
474        let curr = NodeRef::from_raw(self.head.as_ptr().cast::<Node<K, V>>());
475
476        self.next_node(&curr.into())
477    }
478
479    pub fn get_last<'a>(&'a self) -> Option<Entry<'a, K, V>> {
480        let mut curr = self.get_first()?;
481
482        while let Some(next) = self.next_node(&curr) {
483            curr = next;
484        }
485
486        return Some(curr.into())
487    }
488
    /// Returns a borrowing iterator over the list's entries in key order.
    pub fn iter<'a>(&'a self) -> Iter<'a, K, V> {
        Iter::from_list(self)
    }
492}
493
/// `Default` produces an empty list via `SkipList::new`.
impl<'domain, K, V> Default for SkipList<'domain, K, V>
where
    K: Sync,
    V: Sync,
{
    fn default() -> Self {
        Self::new()
    }
}

// SAFETY: shared state is manipulated through atomics and hazard-pointer
// protected nodes, and `K`/`V` are required to be `Send + Sync`.
// NOTE(review): soundness ultimately rests on the correctness of the lock-free
// algorithm above — it is not provable from these bounds alone.
unsafe impl<'domain, K, V> Send for SkipList<'domain, K, V>
where
    K: Send + Sync,
    V: Send + Sync,
{
}

// SAFETY: concurrent `&self` access is the intended mode of operation for this
// list; same caveat as the `Send` impl directly above.
unsafe impl<'domain, K, V> Sync for SkipList<'domain, K, V>
where
    K: Send + Sync,
    V: Send + Sync,
{
}

// TODO Make sure this is sound!
// NOTE(review): assumes the plain `skiplist` and this `sync` list type are
// layout-identical; `repr(Rust)` gives no such guarantee — confirm.
impl<'domain, K, V> From<super::skiplist::SkipList<'domain, K, V>> for SkipList<'domain, K, V>
where
    K: Sync,
    V: Sync,
{
    fn from(list: super::skiplist::SkipList<'domain, K, V>) -> Self {
        unsafe { core::mem::transmute(list) }
    }
}
528
529
/// A guarded handle to a live node: the hazard pointer keeps the node from
/// being reclaimed for as long as the `Entry` is alive.
#[allow(dead_code)]
pub struct Entry<'a, K: 'a, V: 'a> {
    // Raw node pointer; validity is guaranteed by `_hazard` below.
    node: core::ptr::NonNull<Node<K, V>>,
    // Hazard pointer protecting `node` from reclamation.
    _hazard: haphazard::HazardPointer<'a, Global>,
}

impl<'a, K, V> Entry<'a, K, V> {
    /// Borrows the entry's value.
    pub fn val(&self) -> &V {
        // #Safety
        //
        // Our `HazardPointer` ensures that our pointers is valid.
        unsafe { &self.node.as_ref().val }
    }

    /// Borrows the entry's key.
    pub fn key(&self) -> &K {
        // #Safety
        //
        // Our `HazardPointer` ensures that our pointers is valid.
        unsafe { &self.node.as_ref().key }
    }

    /// Logically removes the underlying node (marks it removed and tags its
    /// levels). Returns `None` when another thread removed it first; physical
    /// unlinking is left to subsequent list operations.
    pub fn remove(self) -> Option<Entry<'a, K, V>> {
        unsafe {
            self.node.as_ref().set_removed().ok()?;

            self.node.as_ref().tag_levels(1).expect("no tags to exists");

            Some(self)
            
        }
    }
}

impl<'a, K, V> core::ops::Deref for Entry<'a, K, V> {
    type Target = Node<K, V>;

    // SAFETY rationale: `node` is protected by the entry's hazard pointer.
    fn deref(&self) -> &Self::Target {
        unsafe { self.node.as_ref() }
    }
}

/// The result of a list search: the `(prev, next)` pair recorded at every
/// level, plus the matching node (if one was found).
struct SearchResult<'a, K, V> {
    prev: [(NodeRef<'a, K, V>, Option<NodeRef<'a, K, V>>); HEIGHT],
    target: Option<NodeRef<'a, K, V>>,
}
575
576impl<'a, K, V> Debug for SearchResult<'a, K, V>
577where
578    K: Debug + Default,
579    V: Debug,
580{
581    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
582        f.debug_struct("SearchResult")
583            .field("target", &self.target)
584            .finish()
585    }
586}
587
588impl<'a, K, V> Borrow<K> for Entry<'a, K, V> {
589    fn borrow(&self) -> &K {
590        unsafe { &self.node.as_ref().key }
591    }
592}
593
594impl<'a, K, V> AsRef<V> for Entry<'a, K, V> {
595    fn as_ref(&self) -> &V {
596        unsafe { &self.node.as_ref().val }
597    }
598}
599
/// Internal guarded node handle, mirroring [`Entry`]'s layout: a non-null
/// node pointer plus the hazard pointer protecting it.
#[allow(dead_code)]
struct NodeRef<'a, K, V> {
    node: NonNull<Node<K, V>>,
    _hazard: HazardPointer<'a>
}

impl<'a, K, V> NodeRef<'a, K, V> {
    /// Wraps `ptr`, protecting it with a hazard pointer from `domain`.
    ///
    /// NOTE(review): `protect_raw` publishes the pointer but nothing here
    /// re-checks that `ptr` has not already been retired — callers must
    /// guarantee it is still reachable. `ptr` must also be non-null, as it is
    /// passed to `NonNull::new_unchecked`.
    fn from_raw_in(ptr: *mut Node<K, V>, domain: &'a Domain<Global>) -> Self {
        let mut _hazard = HazardPointer::new_in_domain(domain);
        _hazard.protect_raw(ptr);
        unsafe {
            NodeRef { node: NonNull::new_unchecked(ptr), _hazard }
        }
    }

    /// Wraps `ptr` using the global hazard-pointer domain.
    fn from_raw(ptr: *mut Node<K, V>) -> Self {
        Self::from_raw_in(ptr, Domain::global())
    }

    /// The raw node pointer (still protected while `self` lives).
    fn as_ptr(&self) -> *mut Node<K, V> {
        self.node.as_ptr()
    }
}
623
624impl<'a, K, V> AsRef<Node<K, V>> for NodeRef<'a, K, V> {
625    fn as_ref(&self) -> &Node<K, V> {
626        unsafe { &(*self.as_ptr()) }
627    }
628}
629
impl<'a, K, V> core::ops::Deref for NodeRef<'a, K, V> {
    type Target = Node<K, V>;
    // Delegates to the `AsRef` impl; the hazard pointer keeps the target valid.
    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}
636
637impl<'a, K, V> core::ops::DerefMut for NodeRef<'a, K, V> {
638    fn deref_mut(&mut self) -> &mut Self::Target {
639        unsafe { &mut (*self.as_ptr()) }
640    }
641}
642
643impl<'a, K, V> core::fmt::Debug for NodeRef<'a, K, V> 
644where 
645    K: Debug, 
646    V: Debug 
647{
648    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
649        unsafe {
650            f.debug_struct("NodeRef").field("node", self.node.as_ref()).finish()
651        }
652    }
653}
654
impl<'a, K, V> From<NodeRef<'a, K, V>> for Entry<'a, K, V> {
    fn from(value: NodeRef<'a, K, V>) -> Self {
        // NOTE(review): relies on `NodeRef` and `Entry` having identical field
        // layout (`NonNull` + `HazardPointer`); `repr(Rust)` does not guarantee
        // this across distinct types — confirm, or add `#[repr(C)]` to both.
        unsafe { core::mem::transmute(value) }
    }
}

impl<'a, K, V> Clone for NodeRef<'a, K, V> {
    // Clones the handle by taking a fresh hazard pointer on the same node.
    // NOTE(review): the new hazard lives in the *global* domain even if the
    // original was created via `from_raw_in` with another domain — confirm the
    // node is retired through the same domain, or this protection is void.
    fn clone(&self) -> Self {
        let mut _hazard = HazardPointer::new();
        _hazard.protect_raw(self.node.as_ptr());

        NodeRef { node: self.node.clone(), _hazard }
    }
}
669
670impl<'a, K, V> core::cmp::PartialEq for NodeRef<'a, K, V> {
671    fn eq(&self, other: &Self) -> bool {
672        core::ptr::eq(self.node.as_ptr(), other.node.as_ptr())
673    }
674}
675
676impl<'a, K, V> core::cmp::Eq for NodeRef<'a, K, V> {}
677
/// Thin owner of a raw node pointer that drops the node when it is itself
/// dropped; used as the reclaimer type handed to the hazard-pointer domain.
#[repr(transparent)]
struct DeallocOnDrop<K, V>(*mut Node<K, V>);

// SAFETY: `DeallocOnDrop` only carries ownership of the node across threads
// for reclamation; `K`/`V` are required to be `Send + Sync`.
unsafe impl<K, V> Send for DeallocOnDrop<K, V> 
where K: Send + Sync,
      V: Send + Sync
{
}

// SAFETY: same reasoning as `Send` — shared access does not mutate through the
// pointer; the node is only dropped once, at reclamation time.
unsafe impl<K, V> Sync for DeallocOnDrop<K, V> 
where K: Send + Sync,
      V: Send + Sync
{
}
692
693impl<K, V> From<*mut Node<K, V>> for DeallocOnDrop<K, V> {
694    fn from(node: *mut Node<K, V>) -> Self {
695        DeallocOnDrop(node)
696    }
697}
698
impl<K, V> Drop for DeallocOnDrop<K, V> {
    fn drop(&mut self) {
        // SAFETY: the hazard-pointer domain only drops a retired node once no
        // hazard still protects it, so this is the sole owner at this point.
        unsafe {
            Node::drop(self.0)
        }
    }
}

// Lets the hazard-pointer domain treat `DeallocOnDrop` as a raw-pointer
// wrapper when retiring and reclaiming nodes.
unsafe impl<K, V> Pointer<Node<K, V>> for DeallocOnDrop<K, V> {
    fn into_raw(self) -> *mut Node<K, V> {
        self.0
    }

    unsafe fn from_raw(ptr: *mut Node<K, V>) -> Self {
        DeallocOnDrop::from(ptr)
    }
}
716
717impl<K, V> core::ops::Deref for DeallocOnDrop<K, V> {
718    type Target = Node<K, V>;
719
720    fn deref(&self) -> &Self::Target {
721        unsafe { &(*self.0) }
722    }
723}
724
725impl<K, V> core::ops::DerefMut for DeallocOnDrop<K, V> {
726    fn deref_mut(&mut self) -> &mut Self::Target {
727        unsafe {&mut (*self.0)}
728    }
729}
730
731#[cfg(test)]
732mod sync_test {
733    use rand::Rng;
734
735    use super::*;
736
    // Nodes constructed via `Node::new` and via manual `alloc` + field writes
    // should compare equal. (The test leaks its nodes; fine for a unit test.)
    #[test]
    fn test_new_node_sync() {
        let node = Node::new(100, "hello", 1);
        let other = Node::new(100, "hello", 1);
        unsafe { println!("node 1: {:?},", *node) };
        unsafe { println!("node 2: {:?},", *other) };
        let other = unsafe {
            let node = Node::alloc(1);
            core::ptr::write(&mut (*node).key, 100);
            core::ptr::write(&mut (*node).val, "hello");
            node
        };

        unsafe { println!("node 1: {:?}, node 2: {:?}", *node, *other) };

        unsafe { assert_eq!(*node, *other) };
    }

    // Constructing an empty list must not panic.
    #[test]
    fn test_new_list_sync() {
        let _: SkipList<'_, usize, usize> = SkipList::new();
    }

    // Smoke test: 10k xorshift-keyed inserts must not panic.
    #[test]
    fn test_insert_sync() {
        let list = SkipList::new();
        let mut rng: u16 = rand::random();

        for _ in 0..10_000 {
            rng ^= rng << 3;
            rng ^= rng >> 12;
            rng ^= rng << 7;
            list.insert(rng, "hello there!");
        }
    }

    // A randomly-heighted node should allocate with a non-empty level tower.
    #[test]
    fn test_rand_height_sync() {
        let mut list: SkipList<'_, i32, i32> = SkipList::new();
        let node = Node::new_rand_height("Hello", "There!", &mut list);

        assert!(!node.is_null());
        let height = unsafe { (*node).levels.pointers.len() };

        println!("height: {}", height);

        unsafe {
            println!("{}", *node);
        }

        unsafe {
            let _ = Box::from_raw(node);
        }
    }
791
    // Counts drops of inserted values: displaced/removed nodes must be dropped
    // after `eager_reclaim` (2 of the 3 inserted values), and the last value
    // when the list itself is dropped.
    #[test]
    fn test_drop() {
        struct CountOnDrop<K> {
            key: K,
            counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        }

        impl<K> CountOnDrop<K> {
            fn new(key: K, counter: std::sync::Arc<std::sync::atomic::AtomicUsize>) -> Self {
                CountOnDrop { key, counter }
            }

            // Key-only probe value with its own (ignored) counter.
            fn new_none(key: K) -> Self {
                CountOnDrop { key, counter: std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0)) }
            }
        }
        impl<K: Eq> PartialEq for CountOnDrop<K> {
            fn eq(&self, other: &Self) -> bool {
                self.key == other.key
            }
        }

        impl<K: Eq> Eq for CountOnDrop<K> {}

        impl<K: Ord> PartialOrd for CountOnDrop<K> {
            fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
                Some(self.key.cmp(&other.key))
            }
        }

        impl<K: Ord> Ord for CountOnDrop<K> {
            fn cmp(&self, other: &Self) -> std::cmp::Ordering {
                self.key.cmp(&other.key)
            }
        }

        impl<K> Drop for CountOnDrop<K> {
            fn drop(&mut self) {
                println!("writing to counter!");
                println!("count: {}", self.counter.load(Ordering::SeqCst));
                self.counter.fetch_add(1, Ordering::SeqCst);
                println!("wrote to counter");
            }
        }

        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));

        let list = SkipList::new();

        list.insert(CountOnDrop::new(1, counter.clone()), ());

        list.remove(&CountOnDrop::new_none(1));

        // assert_eq!(counter.load(Ordering::SeqCst), 1);

        list.insert(CountOnDrop::new(1, counter.clone()), ());

        list.insert(CountOnDrop::new(1, counter.clone()), ());

        println!("length: {}", list.len());

        list.garbage.domain.eager_reclaim();

        core::sync::atomic::fence(Ordering::SeqCst);

        assert_eq!(counter.load(Ordering::SeqCst), 2);

        drop(list);

        assert_eq!(counter.load(Ordering::SeqCst), 3);

    }
864
    // Inserted keys should appear via the iterator after each insert.
    #[test]
    fn test_insert_verbose_sync() {
        let list = SkipList::new();

        list.insert(1, 1);

        list.iter().for_each(|n| println!("k: {},", n.key()));

        list.insert(2, 2);

        list.iter().for_each(|n| println!("k: {},", n.key()));

        list.insert(5, 3);

        list.iter().for_each(|n| println!("k: {},", n.key()));
    }

    // Smoke test: random inserts followed by random removes must not panic.
    #[test]
    fn test_remove() {
        let list = SkipList::new();
        let mut rng: u16 = rand::random();

        for _ in 0..10_000 {
            rng ^= rng << 3;
            rng ^= rng >> 12;
            rng ^= rng << 7;
            list.insert(rng, "hello there!");
        }
        for _ in 0..10_000 {
            rng ^= rng << 3;
            rng ^= rng >> 12;
            rng ^= rng << 7;
            list.remove(&rng);
        }
    }

    // Removal semantics: double-remove yields None, duplicate insert keeps a
    // single node per key, and removing everything brings the length to 0.
    #[test]
    fn test_verbose_remove() {
        let list = SkipList::new();

        list.insert(1, 1);
        list.insert(2, 2);
        list.insert(2, 2);
        list.insert(5, 3);

        list.iter().for_each(|n| println!("k: {},", n.key()));

        assert!(list.remove(&1).is_some());

        list.iter().for_each(|n| println!("k: {},", n.key()));

        println!("removing 6");
        assert!(list.remove(&6).is_none());
        println!("removing 1");
        assert!(list.remove(&1).is_none());
        println!("removing 5");
        assert!(list.remove(&5).is_some());
        println!("removing 2");
        assert!(list.remove(&2).is_some());

        list.iter().for_each(|n| println!("k: {},", n.key()));

        assert_eq!(list.len(), 0);
    }
929
    // `find` must skip logically-removed nodes: after flipping a node's
    // removed bit by hand, searches/removes should no longer see it, and
    // should see it again once the bit is cleared.
    #[test]
    fn test_find_removed() {
        let list = SkipList::new();

        list.insert(3, ());

        list.insert(4, ());

        list.insert(5, ());

        assert!(list.find(&3, false).target.is_some());
        assert!(list.find(&4, false).target.is_some());

        // manually get reference to the nodes
        let node_3 = unsafe { &mut (*(*list.head.as_ptr()).levels[0].load_ptr()) };
        let node_4 =
            unsafe { &mut (*(*(*list.head.as_ptr()).levels[0].load_ptr()).levels[0].load_ptr()) };
        let node_5 = unsafe {
            &mut (*(*(*(*list.head.as_ptr()).levels[0].load_ptr()).levels[0].load_ptr()).levels[0]
                .load_ptr())
        };

        // make sure it is the right node
        assert_eq!(node_3.key, 3);
        println!("{:?}", node_3);
        assert_eq!(node_4.key, 4);
        println!("{:?}", node_4);
        assert_eq!(node_5.key, 5);
        println!("{:?}", node_5);

        // remove the node logically
        let _ = node_4.set_removed();

        assert!(list.find(&4, false).target.is_none());

        println!("{:?}", list.find(&3, false));

        assert!(!node_3.removed());

        assert!(list.remove(&4).is_none());

        // clear the removed bit again (masks off the top bit of
        // `height_and_removed`, un-removing the node)
        node_4.height_and_removed.store(
            node_4.height_and_removed.load(Ordering::SeqCst) & (usize::MAX >> 1),
            Ordering::SeqCst,
        );

        assert!(!node_4.removed());

        assert!(list.remove(&4).is_some());
    }
981
982    #[test]
983    fn test_sync_remove() {
984        use std::sync::Arc;
985        let list = Arc::new(SkipList::new());
986        let mut rng = rand::thread_rng();
987
988        for _ in 0..10_000 {
989            list.insert(rng.gen::<u16>(), ());
990        }
991        let threads = (0..20)
992            .map(|_| {
993                let list = list.clone();
994                std::thread::spawn(move || {
995                    let mut rng = rand::thread_rng();
996                    for _ in 0..1_000 {
997                        let target = &rng.gen::<u16>();
998                        list.remove(&target);
999                    }
1000                })
1001            })
1002            .collect::<Vec<_>>();
1003
1004        for thread in threads {
1005            thread.join().unwrap()
1006        }
1007
1008        list.iter().for_each(|e| println!("key: {}", e.key));
1009    }
1010
1011    #[test]
1012    fn test_sync_insert() {
1013        use std::sync::Arc;
1014        let list = Arc::new(SkipList::new());
1015
1016        let threads = (0..20)
1017            .map(|_| {
1018                let list = list.clone();
1019                std::thread::spawn(move || {
1020                    let mut rng = rand::thread_rng();
1021                    for _ in 0..1_000 {
1022                        let target = rng.gen::<u8>();
1023
1024                        list.insert(target, ());
1025                    }
1026                })
1027            })
1028            .collect::<Vec<_>>();
1029
1030        for thread in threads {
1031            thread.join().unwrap()
1032        }
1033
1034        list.iter().for_each(|e| println!("key: {}", e.key));
1035    }
1036
1037    #[test]
1038    fn test_sync_inmove() {
1039        use std::sync::Arc;
1040        let list = Arc::new(SkipList::new());
1041
1042        let threads = (0..20)
1043            .map(|_| {
1044                let list = list.clone();
1045                std::thread::spawn(move || {
1046                    let mut rng = rand::thread_rng();
1047                    for _ in 0..5_000 {
1048                        let target = rng.gen::<u8>();
1049                        if rng.gen::<u8>() % 5 == 0 {
1050                            list.remove(&target);
1051                        } else {
1052                            list.insert(target, ());
1053                        }
1054                    }
1055                })
1056            })
1057            .collect::<Vec<_>>();
1058
1059        for thread in threads {
1060            thread.join().unwrap()
1061        }
1062
1063        list.iter().for_each(|e| println!("key: {}", e.key));
1064    }
1065
1066    #[test]
1067    fn test_sync_iterate() {
1068        use std::sync::Arc;
1069        let list = Arc::new(SkipList::new());
1070
1071        let threads = (0..20)
1072            .map(|_| {
1073                let list = list.clone();
1074                std::thread::spawn(move || {
1075                    let mut rng = rand::thread_rng();
1076                    for _ in 0..1_000 {
1077                        let target = rng.gen::<u8>();
1078                        if rng.gen::<u8>() % 5 == 0 {
1079                            list.remove(&target);
1080                        } else {
1081                            list.insert(target, ());
1082                        }
1083                    }
1084                })
1085            })
1086            .collect::<Vec<_>>();
1087
1088        for _ in 0..5 {
1089            list.iter().for_each(|e| println!("key: {}", e.key()));
1090        }
1091
1092        for thread in threads {
1093            thread.join().unwrap()
1094        }
1095
1096        let list = Arc::<SkipList<'_, u8, ()>>::try_unwrap(list).unwrap();
1097
1098        list.into_iter().for_each(|(k, _)| println!("key: {}", k))
1099    }
1100}