use std::marker::PhantomData;
use std::sync::atomic::Ordering;
use crate::node::{is_tombstone, node_key, node_seq, node_value, tower_load};
use crate::ConcurrentSkipList;
/// A borrowed view of one skiplist entry.
///
/// `key` and `value` point into node memory owned by the skiplist, so the
/// borrows are tied to the lifetime `'a` of the list borrow that produced
/// this entry.
#[derive(Debug, Clone, Copy)]
pub struct Entry<'a> {
    // The entry's key bytes.
    pub key: &'a [u8],
    // The entry's value bytes.
    pub value: &'a [u8],
    // True when this entry marks a deletion rather than a live value.
    pub is_tombstone: bool,
}
/// Follows the level-0 link of `current` and returns the successor node's
/// raw pointer, or null when `current` has no successor.
#[inline]
fn advance_node(current: *const u8) -> *const u8 {
    // SAFETY(review): assumes `current` is a live node (or head sentinel)
    // pointer owned by the skiplist, and that tower level 0 links every node
    // — TODO confirm against crate::node's invariants.
    let link = unsafe { tower_load(current, 0) };
    match link.is_null() {
        true => std::ptr::null(),
        false => link.ptr(),
    }
}
/// A point-in-time view of the skiplist: iterating a snapshot yields only
/// entries whose sequence number is strictly below `snap_seq`.
#[derive(Debug)]
pub struct Snapshot<'a> {
    // Head (sentinel) node of the underlying skiplist.
    pub(crate) head: *const u8,
    // Visibility cutoff; nodes with seq >= snap_seq are skipped.
    pub(crate) snap_seq: u64,
    // Ties the snapshot's lifetime to the borrowed skiplist without owning it.
    pub(crate) _owner: PhantomData<&'a ConcurrentSkipList>,
}
// SAFETY: a Snapshot holds only a raw pointer into the skiplist plus a copied
// cutoff; moving or sharing it across threads is sound exactly when the list
// itself may be accessed from multiple threads, hence the `Sync` bound.
unsafe impl<'a> Send for Snapshot<'a> where ConcurrentSkipList: Sync {}
// SAFETY: all methods take &self and only read through the raw pointer.
unsafe impl<'a> Sync for Snapshot<'a> where ConcurrentSkipList: Sync {}
impl<'a> Snapshot<'a> {
    /// Returns an iterator over the entries visible to this snapshot,
    /// i.e. those with a sequence number below `snap_seq`.
    pub fn iter(&self) -> SnapshotIter<'a> {
        let (current, snap_seq) = (self.head, self.snap_seq);
        SnapshotIter { current, snap_seq, _owner: PhantomData }
    }
}
/// Iterator over a [`Snapshot`]: walks level 0 of the skiplist, yielding only
/// entries whose sequence number is strictly below `snap_seq`.
#[derive(Debug)]
pub struct SnapshotIter<'a> {
    // Node most recently yielded (or the head sentinel before the first call).
    current: *const u8,
    // Visibility cutoff copied from the owning snapshot.
    snap_seq: u64,
    // Ties the iterator's lifetime to the borrowed skiplist.
    _owner: PhantomData<&'a ConcurrentSkipList>,
}
// SAFETY: the iterator holds only a raw pointer into the skiplist and a
// copied cutoff; it is safe to send/share whenever the list itself is Sync.
unsafe impl<'a> Send for SnapshotIter<'a> where ConcurrentSkipList: Sync {}
unsafe impl<'a> Sync for SnapshotIter<'a> where ConcurrentSkipList: Sync {}
impl<'a> Iterator for SnapshotIter<'a> {
    type Item = Entry<'a>;

    /// Advances along level 0, skipping any node written at or after the
    /// snapshot's sequence cutoff.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let node = advance_node(self.current);
            self.current = node;
            if node.is_null() {
                return None;
            }
            // SAFETY(review): `node` came from the list's level-0 links and
            // the list outlives `'a`, so the node accessors should be valid
            // here — relies on crate::node's invariants.
            let seq = unsafe { node_seq(node) };
            if seq < self.snap_seq {
                return Some(Entry {
                    key: unsafe { node_key(node) },
                    value: unsafe { node_value(node) },
                    is_tombstone: unsafe { is_tombstone(node) },
                });
            }
            // Node is newer than the snapshot: keep scanning.
        }
    }
}
/// Unfiltered iterator over the skiplist: walks level 0 and yields every
/// entry, regardless of sequence number.
#[derive(Debug)]
pub struct Iter<'a> {
    // Node most recently yielded (or the head sentinel before the first call).
    pub(crate) current: *const u8,
    // Ties the iterator's lifetime to the borrowed skiplist.
    pub(crate) _owner: PhantomData<&'a ConcurrentSkipList>,
}
// SAFETY: only a raw pointer into the skiplist is held; sending/sharing the
// iterator is sound exactly when the list itself is Sync.
unsafe impl<'a> Send for Iter<'a> where ConcurrentSkipList: Sync {}
unsafe impl<'a> Sync for Iter<'a> where ConcurrentSkipList: Sync {}
impl<'a> Iterator for Iter<'a> {
    type Item = Entry<'a>;

    /// Steps to the successor at level 0 and yields it, or `None` at the end.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let node = advance_node(self.current);
        self.current = node;
        if node.is_null() {
            None
        } else {
            // SAFETY(review): `node` is a non-null level-0 successor and the
            // list outlives `'a` — relies on crate::node's accessor contracts.
            Some(Entry {
                key: unsafe { node_key(node) },
                value: unsafe { node_value(node) },
                is_tombstone: unsafe { is_tombstone(node) },
            })
        }
    }

    /// The list is concurrently mutable, so no length bound can be given.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, None)
    }
}
/// A seekable position within the skiplist, supporting `seek`/`next_entry`
/// style navigation as well as plain iteration.
// Derive Debug for consistency with Entry/Snapshot/SnapshotIter/Iter, all of
// which already derive it; both fields are Debug so this is free.
#[derive(Debug)]
pub struct Cursor<'a> {
    // Node the cursor is positioned on; null when unpositioned or exhausted.
    pub(crate) current: *const u8,
    // Ties the cursor's lifetime to the borrowed skiplist.
    pub(crate) _owner: PhantomData<&'a ConcurrentSkipList>,
}
// SAFETY: only a raw pointer into the skiplist is held; sending/sharing the
// cursor is sound exactly when the list itself is Sync.
unsafe impl<'a> Send for Cursor<'a> where ConcurrentSkipList: Sync {}
unsafe impl<'a> Sync for Cursor<'a> where ConcurrentSkipList: Sync {}
impl<'a> Cursor<'a> {
    /// Positions the cursor on the first node whose key is >= `target`.
    ///
    /// Standard skiplist descent: at each level, move right while the next
    /// key compares Less than `target`; otherwise drop a level. At level 0,
    /// the first node that is Equal or Greater becomes the cursor position.
    /// Returns `false` (and invalidates the cursor) when every key in the
    /// list is < `target`.
    pub fn seek(&mut self, skiplist: &'a ConcurrentSkipList, target: &[u8]) -> bool {
        let mut x = skiplist.skiplist.head;
        // NOTE(review): Relaxed here presumably suffices because a stale
        // (smaller) height only makes the search start lower, not wrong —
        // TODO confirm against the writer side's ordering guarantees.
        let h = skiplist.skiplist.height.load(Ordering::Relaxed);
        // Start at the topmost populated level; guard the h == 0 case.
        let mut level = if h > 0 { h - 1 } else { 0 };
        loop {
            let next = unsafe { crate::node::tower_load(x, level) };
            if next.is_null() {
                // End of this level's list: descend, or fail at level 0.
                if level == 0 {
                    self.current = std::ptr::null();
                    return false;
                }
                level -= 1;
                continue;
            }
            let next_node = next.ptr();
            let next_key = unsafe { node_key(next_node) };
            match crate::util::compare_keys(next_key, target) {
                std::cmp::Ordering::Less => {
                    // Next key still before target: keep moving right.
                    x = next_node;
                }
                std::cmp::Ordering::Equal | std::cmp::Ordering::Greater => {
                    // Candidate found: accept it at level 0, otherwise descend
                    // to look for an earlier qualifying node on a lower level.
                    if level == 0 {
                        self.current = next_node;
                        return true;
                    }
                    level -= 1;
                }
            }
        }
    }
    /// Advances to the next node at level 0.
    ///
    /// Returns `true` if the cursor is positioned on a node afterwards;
    /// a no-op returning `false` when the cursor is already invalid.
    pub fn next_entry(&mut self) -> bool {
        if self.current.is_null() {
            return false;
        }
        self.current = advance_node(self.current);
        !self.current.is_null()
    }
    /// Returns the entry under the cursor, or `None` when the cursor is
    /// unpositioned or exhausted.
    pub fn entry(&self) -> Option<Entry<'a>> {
        if self.current.is_null() {
            return None;
        }
        // SAFETY: `current` is non-null and was obtained from the skiplist,
        // which outlives `'a`, so the node accessors are valid here.
        Some(Entry {
            key: unsafe { node_key(self.current) },
            value: unsafe { node_value(self.current) },
            is_tombstone: unsafe { is_tombstone(self.current) },
        })
    }
    /// `true` while the cursor is positioned on a node.
    pub fn valid(&self) -> bool {
        !self.current.is_null()
    }
}
impl<'a> Iterator for Cursor<'a> {
    type Item = Entry<'a>;

    /// Yields the entry under the cursor and then advances past it;
    /// `None` once the cursor is invalid.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // `entry()` is `None` exactly when the cursor is invalid, so the `?`
        // replaces the explicit `valid()` guard without changing behavior.
        let item = self.entry()?;
        self.next_entry();
        Some(item)
    }
}