use maybe_dangling::MaybeDangling;
use fast_slice_utils::find_prefix_overlap;
use crate::alloc::{Allocator, GlobalAlloc};
use crate::utils::ByteMask;
use crate::trie_node::*;
use crate::PathMap;
pub use crate::write_zipper::*;
pub use crate::trie_ref::*;
pub use crate::zipper_head::*;
pub use crate::product_zipper::{ProductZipper, ProductZipperG, ZipperProduct};
pub use crate::overlay_zipper::{OverlayZipper};
pub use crate::prefix_zipper::{PrefixZipper};
pub use crate::empty_zipper::{EmptyZipper};
pub use crate::poly_zipper::PolyZipper;
use crate::zipper_tracking::*;
/// The base trait common to all zippers: read-only queries about the current focus
pub trait Zipper {
    /// Returns `true` if the focus is on a path that exists in the trie
    fn path_exists(&self) -> bool;
    /// Returns `true` if there is a value at the focus
    fn is_val(&self) -> bool;
    /// Deprecated alias for [`is_val`](Self::is_val)
    #[deprecated] fn is_value(&self) -> bool {
        self.is_val()
    }
    /// Returns the number of child bytes directly below the focus
    fn child_count(&self) -> usize;
    /// Returns a mask describing which child bytes exist directly below the focus
    fn child_mask(&self) -> ByteMask;
}
/// Access to the value at a zipper's focus
pub trait ZipperValues<V> {
    /// Returns a reference to the value at the focus, if one exists
    fn val(&self) -> Option<&V>;
    /// Deprecated alias for [`val`](Self::val)
    #[deprecated] fn value(&self) -> Option<&V> {
        self.val()
    }
}
/// The ability to fork a new read-only zipper rooted at the current focus
pub trait ZipperForking<V> {
    /// The concrete read-zipper type produced by [`fork_read_zipper`](Self::fork_read_zipper)
    type ReadZipperT<'a>: ZipperAbsolutePath + ZipperIteration + ZipperValues<V> where Self: 'a;
    /// Returns a new read-only zipper whose root is this zipper's current focus
    fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a>;
}
/// Extraction of whole subtries from a zipper's focus
pub trait ZipperSubtries<V: Clone + Send + Sync, A: Allocator = GlobalAlloc>: ZipperValues<V> + zipper_priv::ZipperPriv<V=V, A=A> {
    /// Returns a new [`PathMap`] built from the trie below the focus, if any
    fn make_map(&self) -> Option<PathMap<Self::V, A>>;
}
/// Trait for zippers that can move their focus around within a trie
pub trait ZipperMoving: Zipper {
    /// Returns `true` if the focus is at the zipper's root, i.e. the path is empty
    fn at_root(&self) -> bool {
        self.path().is_empty()
    }
    /// Moves the focus back to the zipper's root
    fn reset(&mut self) {
        while !self.at_root() {
            self.ascend_byte();
        }
    }
    /// Returns the path from the zipper's root to the current focus
    fn path(&self) -> &[u8];
    /// Returns the number of values at or below the focus
    fn val_count(&self) -> usize;
    /// Moves the focus to `path`, specified relative to the zipper's root,
    /// re-using the prefix the old and new paths share rather than re-descending
    /// from the root.  Returns the length of that shared prefix
    fn move_to_path<K: AsRef<[u8]>>(&mut self, path: K) -> usize {
        let path = path.as_ref();
        let p = self.path();
        let overlap = find_prefix_overlap(path, p);
        let to_ascend = p.len() - overlap;
        if overlap == 0 {
            // No shared prefix, so a wholesale reset may be cheaper than byte-wise ascent
            self.reset();
            self.descend_to(path);
        } else {
            self.ascend(to_ascend);
            self.descend_to(&path[overlap..]);
        }
        overlap
    }
    /// Descends the focus by every byte of `k`; the destination path need not exist
    fn descend_to<K: AsRef<[u8]>>(&mut self, k: K);
    /// Descends by `k`, returning whether the resulting path exists in the trie
    fn descend_to_check<K: AsRef<[u8]>>(&mut self, k: K) -> bool {
        self.descend_to(k);
        self.path_exists()
    }
    /// Descends along `k` one byte at a time, stopping before the first byte that
    /// would leave the trie.  Returns the number of bytes actually descended
    fn descend_to_existing<K: AsRef<[u8]>>(&mut self, k: K) -> usize {
        let k = k.as_ref();
        let mut i = 0;
        while i < k.len() {
            self.descend_to_byte(k[i]);
            if !self.path_exists() {
                // Undo the speculative step so the focus remains on an existing path
                self.ascend_byte();
                return i
            }
            i += 1
        }
        i
    }
    /// Descends along `k`, stopping immediately after the first value encountered,
    /// or before the first byte that would leave the trie.  Returns the number of
    /// bytes descended
    fn descend_to_val<K: AsRef<[u8]>>(&mut self, k: K) -> usize {
        let k = k.as_ref();
        let mut i = 0;
        while i < k.len() {
            self.descend_to_byte(k[i]);
            if !self.path_exists() {
                self.ascend_byte();
                return i
            }
            i += 1;
            if self.is_val() {
                return i
            }
        }
        i
    }
    /// Deprecated alias for [`descend_to_val`](Self::descend_to_val)
    #[deprecated] fn descend_to_value<K: AsRef<[u8]>>(&mut self, k: K) -> usize {
        self.descend_to_val(k)
    }
    /// Descends the focus by the single byte `k`
    fn descend_to_byte(&mut self, k: u8) {
        self.descend_to(&[k])
    }
    /// Descends by byte `k` only if the resulting path exists; returns whether it descended
    fn descend_to_existing_byte(&mut self, k: u8) -> bool {
        self.descend_to_byte(k);
        if self.path_exists() {
            true
        } else {
            self.ascend_byte();
            false
        }
    }
    /// Descends to the `idx`th existing child byte below the focus, in ascending
    /// byte order.  Returns `false` without moving if fewer than `idx + 1` children exist
    fn descend_indexed_byte(&mut self, idx: usize) -> bool {
        let mask = self.child_mask();
        let Some(child_byte) = mask.indexed_bit::<true>(idx) else {
            return false;
        };
        self.descend_to_byte(child_byte);
        // The byte came from the child mask, so the destination must exist
        debug_assert!(self.path_exists());
        true
    }
    /// Deprecated alias for [`descend_indexed_byte`](Self::descend_indexed_byte)
    #[deprecated] fn descend_indexed_branch(&mut self, idx: usize) -> bool {
        self.descend_indexed_byte(idx)
    }
    /// Descends to the smallest child byte below the focus, if one exists
    fn descend_first_byte(&mut self) -> bool {
        self.descend_indexed_byte(0)
    }
    /// Descends to the largest child byte below the focus, if one exists
    fn descend_last_byte(&mut self) -> bool {
        let cc = self.child_count();
        if cc == 0 { false }
        else { self.descend_indexed_byte(cc - 1) }
    }
    /// Descends while the focus has exactly one child, stopping at the first value
    /// encountered along the way.  Returns whether the focus moved at all
    fn descend_until(&mut self) -> bool {
        let mut descended = false;
        while self.child_count() == 1 {
            descended = true;
            self.descend_first_byte();
            if self.is_val() {
                break;
            }
        }
        descended
    }
    /// Ascends the focus by `steps` bytes; returns whether the full ascent was possible
    fn ascend(&mut self, steps: usize) -> bool;
    /// Ascends the focus by one byte
    fn ascend_byte(&mut self) -> bool {
        self.ascend(1)
    }
    /// Ascends to the nearest ancestor that is a value or branch point
    fn ascend_until(&mut self) -> bool;
    /// Ascends to the nearest ancestor that is a branch point
    fn ascend_until_branch(&mut self) -> bool;
    /// Moves the focus to the next sibling byte at the same depth, in ascending
    /// byte order.  Returns `false`, leaving the focus where it was, if no next
    /// sibling exists
    fn to_next_sibling_byte(&mut self) -> bool {
        let cur_byte = match self.path().last() {
            Some(byte) => *byte,
            None => return false
        };
        if !self.ascend_byte() {
            return false
        }
        let mask = self.child_mask();
        match mask.next_bit(cur_byte) {
            Some(byte) => {
                self.descend_to_byte(byte);
                debug_assert!(self.path_exists());
                true
            },
            None => {
                // Restore the original focus; it must exist since we just came from it
                self.descend_to_byte(cur_byte);
                debug_assert!(self.path_exists());
                false
            }
        }
    }
    /// Moves the focus to the previous sibling byte at the same depth, in
    /// descending byte order.  Returns `false`, leaving the focus where it was,
    /// if no previous sibling exists
    fn to_prev_sibling_byte(&mut self) -> bool {
        let cur_byte = match self.path().last() {
            Some(byte) => *byte,
            None => return false
        };
        if !self.ascend_byte() {
            return false
        }
        let mask = self.child_mask();
        match mask.prev_bit(cur_byte) {
            Some(byte) => {
                self.descend_to_byte(byte);
                debug_assert!(self.path_exists());
                true
            },
            None => {
                self.descend_to_byte(cur_byte);
                debug_assert!(self.path_exists());
                false
            }
        }
    }
    /// Advances the focus one step in a systematic depth-first traversal
    fn to_next_step(&mut self) -> bool {
        if self.child_count() == 0 {
            // At a leaf: step across, ascending until a sibling subtree is found
            while !self.to_next_sibling_byte() {
                if !self.ascend_byte() {
                    return false;
                }
            }
        } else {
            return self.descend_first_byte()
        }
        true
    }
}
/// Value access that borrows for the zipper's full `'a` lifetime rather than
/// the lifetime of `&self`
pub trait ZipperReadOnlyValues<'a, V>: ZipperValues<V> {
    /// Returns the value at the focus, borrowed for the zipper's `'a` lifetime
    fn get_val(&self) -> Option<&'a V>;
    /// Deprecated alias for [`get_val`](Self::get_val)
    #[deprecated] fn get_value(&self) -> Option<&'a V> {
        self.get_val()
    }
}
/// Witness vended by [`ZipperReadOnlyConditionalValues`]; wraps an optional node
/// handle.  NOTE(review): presumably keeps the focus node alive so borrowed
/// values stay valid — confirm against `ReadZipperCore::witness`
pub struct ReadZipperWitness<V: Clone + Send + Sync, A: Allocator>(pub(crate) Option<TrieNodeODRc<V, A>>);
/// Value access whose borrow lifetime is tied to an externally held witness
pub trait ZipperReadOnlyConditionalValues<'a, V>: ZipperValues<V> {
    /// The witness type vended by [`witness`](Self::witness)
    type WitnessT;
    /// Creates a witness against which values can subsequently be borrowed
    fn witness<'w>(&self) -> Self::WitnessT;
    /// Returns the value at the focus, borrowed for the witness's lifetime `'w`
    fn get_val_with_witness<'w>(&self, witness: &'w Self::WitnessT) -> Option<&'w V> where 'a: 'w;
}
/// Iteration that can hand out values with the zipper's full `'a` lifetime
pub trait ZipperReadOnlyIteration<'a, V>: ZipperReadOnlyValues<'a, V> + ZipperIteration {
    /// Advances the focus to the next value in depth-first order and returns it,
    /// or `None` once iteration is exhausted
    fn to_next_get_val(&mut self) -> Option<&'a V> {
        if !self.to_next_val() {
            return None;
        }
        let val = self.get_val();
        // A successful `to_next_val` always leaves the focus on a value
        debug_assert!(val.is_some());
        val
    }
    /// Deprecated alias for [`to_next_get_val`](Self::to_next_get_val)
    #[deprecated] fn to_next_get_value(&mut self) -> Option<&'a V> {
        self.to_next_get_val()
    }
}
/// Iteration producing values borrowed against a witness
pub trait ZipperReadOnlyConditionalIteration<'a, V>: ZipperReadOnlyConditionalValues<'a, V> + ZipperIteration {
    /// Advances the focus to the next value in depth-first order and returns it
    /// borrowed against `witness`, or `None` once iteration is exhausted
    fn to_next_get_val_with_witness<'w>(&mut self, witness: &'w Self::WitnessT) -> Option<&'w V> where 'a: 'w {
        if self.to_next_val() {
            let val = self.get_val_with_witness(witness);
            // A successful `to_next_val` always leaves the focus on a value
            debug_assert!(val.is_some());
            val
        } else {
            None
        }
    }
}
/// Creation of trie references for paths below a read-only zipper's focus
pub trait ZipperReadOnlySubtries<'a, V: Clone + Send + Sync, A: Allocator = GlobalAlloc>: ZipperSubtries<V, A> + ZipperReadOnlyPriv<'a, V, A> {
    /// The concrete trie-reference type produced by [`trie_ref_at_path`](Self::trie_ref_at_path)
    type TrieRefT: Into<TrieRef<'a, V, A>> where V: 'a, A: 'a;
    /// Returns a trie reference for `path`, specified relative to the focus
    fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> Self::TrieRefT;
}
/// Depth-first traversal operations over a zipper
pub trait ZipperIteration: ZipperMoving {
    /// Moves the focus to the next value in a depth-first traversal.  Returns
    /// `false`, with the focus back at the root, when the trie is exhausted
    fn to_next_val(&mut self) -> bool {
        loop {
            if self.descend_first_byte() {
                if self.is_val() {
                    return true
                }
                // Fast-forward through runs of single-child nodes
                if self.descend_until() {
                    if self.is_val() {
                        return true
                    }
                }
            } else {
                // Dead end: climb until a sibling subtree is available
                'ascending: loop {
                    if self.to_next_sibling_byte() {
                        if self.is_val() {
                            return true
                        }
                        // Resume descending into the sibling subtree
                        break 'ascending
                    } else {
                        self.ascend_byte();
                        if self.at_root() {
                            return false
                        }
                    }
                }
            }
        }
    }
    /// Descends as deep as possible by repeatedly taking the last (largest) child
    /// byte.  Returns whether the focus moved at all
    fn descend_last_path(&mut self) -> bool {
        let mut any = false;
        while self.descend_last_byte() {
            any = true;
            self.descend_until();
        }
        any
    }
    /// Descends to the first existing path exactly `k` bytes below the focus.
    /// Returns `false` if no such path exists
    fn descend_first_k_path(&mut self, k: usize) -> bool {
        k_path_default_internal(self, k, self.path().len())
    }
    /// Moves to the next existing path at the `k`-byte depth established by
    /// [`descend_first_k_path`](Self::descend_first_k_path).  Returns `false`
    /// when the positions at that depth are exhausted
    fn to_next_k_path(&mut self, k: usize) -> bool {
        // The base is the depth the k-byte suffix hangs from; a path shorter
        // than `k` cannot have one
        let base_idx = if self.path().len() >= k {
            self.path().len() - k
        } else {
            return false
        };
        k_path_default_internal(self, k, base_idx)
    }
}
/// Shared implementation behind `descend_first_k_path` / `to_next_k_path`:
/// moves `z` to the next path (in depth-first order) that is exactly
/// `base_idx + k` bytes long, returning `false` with the focus back at depth
/// `base_idx` when no more such paths remain
#[inline]
fn k_path_default_internal<Z: ZipperMoving + ?Sized>(z: &mut Z, k: usize, base_idx: usize) -> bool {
    loop {
        if z.path().len() < base_idx + k {
            // Below the target depth: descend greedily toward it
            while z.descend_first_byte() {
                if z.path().len() == base_idx + k { return true }
            }
        }
        // Either a dead end short of the target depth, or a completed k-path:
        // try the next sibling at the current depth
        if z.to_next_sibling_byte() {
            if z.path().len() == base_idx + k { return true }
            continue
        }
        // Climb toward the base, stopping at the first level with an untried sibling
        while z.path().len() > base_idx {
            z.ascend_byte();
            if z.path().len() == base_idx { return false }
            if z.to_next_sibling_byte() { break }
        }
    }
}
/// Zippers that know their absolute position within a larger trie
pub trait ZipperAbsolutePath: ZipperMoving {
    /// Returns the path from the origin of the overall trie to the focus
    fn origin_path(&self) -> &[u8];
    /// Returns the portion of [`origin_path`](Self::origin_path) above the zipper's root
    fn root_prefix_path(&self) -> &[u8];
}
/// Introspection of node sharing in the concrete trie representation
pub trait ZipperConcrete {
    /// Returns an identifier for the focus node if it is shared, otherwise `None`
    fn shared_node_id(&self) -> Option<u64>;
    /// Returns `true` if the focus node is shared with another part of a trie
    fn is_shared(&self) -> bool;
}
/// Internal path-buffer management for zippers that maintain one
pub trait ZipperPathBuffer: ZipperMoving {
    /// Returns the origin path, asserting the buffer holds at least `len` bytes
    ///
    /// # Safety
    /// NOTE(review): the exact contract is set by implementors; presumably the
    /// caller must guarantee `len` bytes of the buffer are initialized — confirm
    unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8];
    /// Ensures the internal buffers are allocated and ready for use
    fn prepare_buffers(&mut self);
    /// Reserves buffer capacity for paths up to `path_len` bytes and `stack_depth` frames
    fn reserve_buffers(&mut self, path_len: usize, stack_depth: usize);
}
/// Crate-private zipper plumbing traits
pub(crate) mod zipper_priv {
    use super::*;
    /// Access to the node at a zipper's focus
    pub trait ZipperPriv {
        /// The value type stored in the trie
        type V: Clone + Send + Sync;
        /// The allocator type used by the trie
        type A: Allocator;
        /// Returns an abstract reference to the node at the focus
        fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A>;
        /// Borrows the node at the focus directly, when that is possible
        fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>>;
    }
    /// Read-only internal access for zippers with root lifetime `'a`
    pub trait ZipperReadOnlyPriv<'a, V: Clone + Send + Sync, A: Allocator> {
        /// Decomposes the zipper's state into a node reference, a key slice, and
        /// an optional value reference
        fn borrow_raw_parts(&self) -> (TaggedNodeRef<'_, V, A>, &[u8], Option<&V>);
        /// Takes the inner zipper core out of the zipper, when supported
        fn take_core(&mut self) -> Option<ReadZipperCore<'a, 'static, V, A>>;
    }
}
use zipper_priv::*;
/// Forwarding impl so a `&mut Z` can be used wherever a [`Zipper`] is expected.
/// UFCS makes it explicit that every call resolves to `Z`'s own implementation.
impl<Z> Zipper for &mut Z where Z: Zipper {
    fn path_exists(&self) -> bool { Z::path_exists(&**self) }
    fn is_val(&self) -> bool { Z::is_val(&**self) }
    fn child_count(&self) -> usize { Z::child_count(&**self) }
    fn child_mask(&self) -> ByteMask { Z::child_mask(&**self) }
}
// ---- Forwarding impls: `&mut Z` implements the zipper traits by delegation ----
// Default-bodied trait methods are forwarded explicitly so any overrides `Z`
// provides are used, instead of the trait defaults re-running on the wrapper.
impl<Z> ZipperMoving for &mut Z where Z: ZipperMoving + Zipper {
    fn at_root(&self) -> bool { (**self).at_root() }
    fn reset(&mut self) { (**self).reset() }
    fn path(&self) -> &[u8] { (**self).path() }
    fn val_count(&self) -> usize { (**self).val_count() }
    fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) { (**self).descend_to(k) }
    fn descend_to_check<K: AsRef<[u8]>>(&mut self, k: K) -> bool { (**self).descend_to_check(k) }
    fn descend_to_existing<K: AsRef<[u8]>>(&mut self, k: K) -> usize { (**self).descend_to_existing(k) }
    fn descend_to_val<K: AsRef<[u8]>>(&mut self, k: K) -> usize { (**self).descend_to_val(k) }
    fn descend_to_byte(&mut self, k: u8) { (**self).descend_to_byte(k) }
    fn descend_to_existing_byte(&mut self, k: u8) -> bool { (**self).descend_to_existing_byte(k) }
    fn descend_indexed_byte(&mut self, idx: usize) -> bool { (**self).descend_indexed_byte(idx) }
    fn descend_first_byte(&mut self) -> bool { (**self).descend_first_byte() }
    fn descend_until(&mut self) -> bool { (**self).descend_until() }
    fn ascend(&mut self, steps: usize) -> bool { (**self).ascend(steps) }
    fn ascend_byte(&mut self) -> bool { (**self).ascend_byte() }
    fn ascend_until(&mut self) -> bool { (**self).ascend_until() }
    fn ascend_until_branch(&mut self) -> bool { (**self).ascend_until_branch() }
    fn to_next_sibling_byte(&mut self) -> bool { (**self).to_next_sibling_byte() }
    fn to_prev_sibling_byte(&mut self) -> bool { (**self).to_prev_sibling_byte() }
    fn to_next_step(&mut self) -> bool { (**self).to_next_step() }
}
impl<Z> ZipperAbsolutePath for &mut Z where Z: ZipperAbsolutePath {
    fn origin_path(&self) -> &[u8] { (**self).origin_path() }
    fn root_prefix_path(&self) -> &[u8] { (**self).root_prefix_path() }
}
// NOTE(review): `descend_last_path` is not forwarded here, so the trait default
// runs on the wrapper; equivalent only if `Z` doesn't override it — confirm
impl<Z> ZipperIteration for &mut Z where Z: ZipperIteration {
    fn to_next_val(&mut self) -> bool { (**self).to_next_val() }
    fn descend_first_k_path(&mut self, k: usize) -> bool { (**self).descend_first_k_path(k) }
    fn to_next_k_path(&mut self, k: usize) -> bool { (**self).to_next_k_path(k) }
}
impl<V, Z> ZipperValues<V> for &mut Z where Z: ZipperValues<V> {
    fn val(&self) -> Option<&V> { (**self).val() }
}
impl<V, Z> ZipperForking<V> for &mut Z where Z: ZipperForking<V> {
    type ReadZipperT<'a> = Z::ReadZipperT<'a> where Self: 'a;
    fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> { (**self).fork_read_zipper() }
}
impl<V: Clone + Send + Sync, Z, A: Allocator> ZipperSubtries<V, A> for &mut Z where Z: ZipperSubtries<V, A> {
    fn make_map(&self) -> Option<PathMap<Self::V, A>> { (**self).make_map() }
}
impl<'a, V: Clone + Send + Sync, Z> ZipperReadOnlyValues<'a, V> for &mut Z where Z: ZipperReadOnlyValues<'a, V>, Self: ZipperValues<V> {
    fn get_val(&self) -> Option<&'a V> { (**self).get_val() }
}
impl<'a, V: Clone + Send + Sync, Z> ZipperReadOnlyConditionalValues<'a, V> for &mut Z where Z: ZipperReadOnlyConditionalValues<'a, V>, Self: ZipperValues<V> {
    type WitnessT = Z::WitnessT;
    fn witness<'w>(&self) -> Z::WitnessT { (**self).witness() }
    fn get_val_with_witness<'w>(&self, witness: &'w Z::WitnessT) -> Option<&'w V> where 'a: 'w { (**self).get_val_with_witness(witness) }
}
impl<'a, V, Z> ZipperReadOnlyIteration<'a, V> for &mut Z where Z: ZipperReadOnlyIteration<'a, V>, Self: ZipperReadOnlyValues<'a, V> + ZipperIteration {
    fn to_next_get_val(&mut self) -> Option<&'a V> { (**self).to_next_get_val() }
}
impl<'a, V, Z> ZipperReadOnlyConditionalIteration<'a, V> for &mut Z where Z: ZipperReadOnlyConditionalIteration<'a, V>, Self: ZipperReadOnlyConditionalValues<'a, V, WitnessT = Z::WitnessT> + ZipperIteration {
    fn to_next_get_val_with_witness<'w>(&mut self, witness: &'w Self::WitnessT) -> Option<&'w V> where 'a: 'w { (**self).to_next_get_val_with_witness(witness) }
}
impl<'a, V: Clone + Send + Sync + 'a, Z, A: Allocator + 'a> ZipperReadOnlySubtries<'a, V, A> for &mut Z where Z: ZipperReadOnlySubtries<'a, V, A>, Self: ZipperReadOnlyPriv<'a, V, A> + ZipperSubtries<V, A> {
    type TrieRefT = <Z as ZipperReadOnlySubtries<'a, V, A>>::TrieRefT;
    fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> Self::TrieRefT { (**self).trie_ref_at_path(path) }
}
impl<Z> ZipperConcrete for &mut Z where Z: ZipperConcrete {
    #[inline]
    fn shared_node_id(&self) -> Option<u64> { (**self).shared_node_id() }
    #[inline]
    fn is_shared(&self) -> bool { (**self).is_shared() }
}
impl<V: Clone + Send + Sync, Z, A: Allocator> ZipperPriv for &mut Z where Z: ZipperPriv<V=V, A=A> {
    type V = V;
    type A = A;
    fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> { (**self).get_focus() }
    fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> { (**self).try_borrow_focus() }
}
impl<Z> ZipperPathBuffer for &mut Z where Z: ZipperPathBuffer {
    // SAFETY: same contract as `Z`'s implementation; this impl adds no new requirements
    unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8] { unsafe{ (**self).origin_path_assert_len(len) } }
    fn prepare_buffers(&mut self) { (**self).prepare_buffers() }
    fn reserve_buffers(&mut self, path_len: usize, stack_depth: usize) { (**self).reserve_buffers(path_len, stack_depth) }
}
impl<'a, V: Clone + Send + Sync, Z, A: Allocator> ZipperReadOnlyPriv<'a, V, A> for &mut Z where Z: ZipperReadOnlyPriv<'a, V, A> {
    fn borrow_raw_parts<'z>(&'z self) -> (TaggedNodeRef<'z, V, A>, &'z [u8], Option<&'z V>) { (**self).borrow_raw_parts() }
    fn take_core(&mut self) -> Option<ReadZipperCore<'a, 'static, V, A>> { (**self).take_core() }
}
/// A read-only zipper whose access is registered with a runtime tracker,
/// allowing conflicting reads/writes to be detected
#[derive(Clone)]
pub struct ReadZipperTracked<'a, 'path, V: Clone + Send + Sync, A: Allocator = GlobalAlloc> {
    // The inner zipper that does all the actual work
    z: ReadZipperCore<'a, 'path, V, A>,
    // Held only so its own Drop releases the tracker registration
    #[allow(unused)]
    tracker: Option<ZipperTracker<TrackingRead>>,
}
// Deliberately empty Drop.  NOTE(review): presumably present to prevent partial
// moves out of the struct and/or pin field drop order (`z` before `tracker`) — confirm
impl<V: Clone + Send + Sync, A: Allocator> Drop for ReadZipperTracked<'_, '_, V, A> {
    fn drop(&mut self) { }
}
// All trait impls below delegate to the inner `ReadZipperCore`
impl<V: Clone + Send + Sync + Unpin, A: Allocator> Zipper for ReadZipperTracked<'_, '_, V, A>{
    #[inline]
    fn path_exists(&self) -> bool { self.z.path_exists() }
    fn is_val(&self) -> bool { self.z.is_val() }
    fn child_count(&self) -> usize { self.z.child_count() }
    fn child_mask(&self) -> ByteMask { self.z.child_mask() }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperValues<V> for ReadZipperTracked<'_, '_, V, A>{
    fn val(&self) -> Option<&V> { self.z.val() }
}
// Forking produces an untracked zipper borrowing from this one
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperForking<V> for ReadZipperTracked<'_, '_, V, A>{
    type ReadZipperT<'a> = ReadZipperUntracked<'a, 'a, V, A> where Self: 'a;
    fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> {
        let forked_zipper = self.z.fork_read_zipper();
        Self::ReadZipperT::new_forked_with_inner_zipper(forked_zipper)
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperSubtries<V, A> for ReadZipperTracked<'_, '_, V, A>{
    fn make_map(&self) -> Option<PathMap<Self::V, A>> { self.z.make_map() }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperMoving for ReadZipperTracked<'trie, '_, V, A> {
    fn at_root(&self) -> bool { self.z.at_root() }
    fn reset(&mut self) { self.z.reset() }
    #[inline]
    fn path(&self) -> &[u8] { self.z.path() }
    fn val_count(&self) -> usize { self.z.val_count() }
    fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) { self.z.descend_to(k) }
    fn descend_to_check<K: AsRef<[u8]>>(&mut self, k: K) -> bool { self.z.descend_to_check(k) }
    fn descend_to_existing<K: AsRef<[u8]>>(&mut self, k: K) -> usize { self.z.descend_to_existing(k) }
    fn descend_to_val<K: AsRef<[u8]>>(&mut self, k: K) -> usize { self.z.descend_to_val(k) }
    fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) }
    fn descend_to_existing_byte(&mut self, k: u8) -> bool { self.z.descend_to_existing_byte(k) }
    fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) }
    fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() }
    fn descend_until(&mut self) -> bool { self.z.descend_until() }
    fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() }
    fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() }
    fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) }
    fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() }
    fn ascend_until(&mut self) -> bool { self.z.ascend_until() }
    fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() }
    fn to_next_step(&mut self) -> bool { self.z.to_next_step() }
}
// Tracked zippers vend a real witness (see `ReadZipperWitness`) because the
// value borrow must be able to outlive the zipper's tracker registration
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalValues<'trie, V> for ReadZipperTracked<'trie, '_, V, A> {
    type WitnessT = ReadZipperWitness<V, A>;
    fn witness<'w>(&self) -> ReadZipperWitness<V, A> { self.z.witness() }
    fn get_val_with_witness<'w>(&self, witness: &'w ReadZipperWitness<V, A>) -> Option<&'w V> where 'trie: 'w { self.z.get_val_with_witness(witness) }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlySubtries<'a, V, A> for ReadZipperTracked<'a, '_, V, A> {
    type TrieRefT = TrieRefOwned<V, A>;
    fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> TrieRefOwned<V, A> {
        let path = path.as_ref();
        TrieRefOwned::new_with_key_and_path_in(self.z.focus_parent(), self.val(), self.z.node_key(), path, self.z.alloc.clone())
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperConcrete for ReadZipperTracked<'_, '_, V, A> {
    #[inline]
    fn shared_node_id(&self) -> Option<u64> { self.z.shared_node_id() }
    #[inline]
    fn is_shared(&self) -> bool { self.z.is_shared() }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlyPriv<'a, V, A> for ReadZipperTracked<'a, '_, V, A> {
    fn borrow_raw_parts<'z>(&'z self) -> (TaggedNodeRef<'z, V, A>, &'z [u8], Option<&'z V>) { self.z.borrow_raw_parts() }
    fn take_core(&mut self) -> Option<ReadZipperCore<'a, 'static, V, A>> {
        // Swap in an empty placeholder core so `self` remains structurally valid
        let mut temp_core = ReadZipperCore::new_with_node_and_path_internal_in(OwnedOrBorrowed::None, &[], 0, None, self.z.alloc.clone());
        core::mem::swap(&mut temp_core, &mut self.z);
        Some(temp_core.make_static_path())
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> zipper_priv::ZipperPriv for ReadZipperTracked<'_, '_, V, A> {
    type V = V;
    type A = A;
    fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> { self.z.get_focus() }
    fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> { self.z.try_borrow_focus() }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperPathBuffer for ReadZipperTracked<'trie, '_, V, A> {
    // SAFETY: same contract as the inner core's implementation
    unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8] { unsafe{ self.z.origin_path_assert_len(len) } }
    fn prepare_buffers(&mut self) { self.z.prepare_buffers() }
    fn reserve_buffers(&mut self, path_len: usize, stack_depth: usize) { self.z.reserve_buffers(path_len, stack_depth) }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperIteration for ReadZipperTracked<'trie, '_, V, A> {
    fn to_next_val(&mut self) -> bool { self.z.to_next_val() }
    fn descend_first_k_path(&mut self, k: usize) -> bool { self.z.descend_first_k_path(k) }
    fn to_next_k_path(&mut self, k: usize) -> bool { self.z.to_next_k_path(k) }
}
// Default methods suffice; they build on the witness-based value access above
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalIteration<'trie, V> for ReadZipperTracked<'trie, '_, V, A> { }
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperAbsolutePath for ReadZipperTracked<'trie, '_, V, A> {
    fn origin_path(&self) -> &[u8] { self.z.origin_path() }
    fn root_prefix_path(&self) -> &[u8] { self.z.root_prefix_path() }
}
impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ReadZipperTracked<'a, 'path, V, A> {
    /// Makes a new tracked zipper rooted at `root_node`, borrowing `path`
    pub(crate) fn new_with_node_and_path_in(root_node: &'a TrieNodeODRc<V, A>, owned_root: bool, path: &'path [u8], root_prefix_len: usize, root_key_start: usize, root_val: Option<&'a V>, alloc: A, tracker: Option<ZipperTracker<TrackingRead>>) -> Self {
        let core = ReadZipperCore::new_with_node_and_path_in(root_node, owned_root, path, root_prefix_len, root_key_start, root_val, alloc);
        Self { z: core, tracker }
    }
    /// Makes a new tracked zipper rooted at `root_node`, copying `path` internally
    pub(crate) fn new_with_node_and_cloned_path_in(root_node: &'a TrieNodeODRc<V, A>, owned_root: bool, path: &[u8], root_prefix_len: usize, root_key_start: usize, root_val: Option<&'a V>, alloc: A, tracker: Option<ZipperTracker<TrackingRead>>) -> Self {
        let core = ReadZipperCore::new_with_node_and_cloned_path_in(root_node, owned_root, path, root_prefix_len, root_key_start, root_val, alloc);
        Self { z: core, tracker }
    }
}
/// A read-only zipper with no runtime access tracking; the borrow checker alone
/// enforces exclusivity
#[derive(Clone)]
pub struct ReadZipperUntracked<'a, 'path, V: Clone + Send + Sync, A: Allocator = GlobalAlloc> {
    // The inner zipper that does all the actual work
    z: ReadZipperCore<'a, 'path, V, A>,
}
// All trait impls below delegate to the inner `ReadZipperCore`
impl<V: Clone + Send + Sync + Unpin, A: Allocator> Zipper for ReadZipperUntracked<'_, '_, V, A> {
    #[inline]
    fn path_exists(&self) -> bool { self.z.path_exists() }
    fn is_val(&self) -> bool { self.z.is_val() }
    fn child_count(&self) -> usize { self.z.child_count() }
    fn child_mask(&self) -> ByteMask { self.z.child_mask() }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperValues<V> for ReadZipperUntracked<'_, '_, V, A> {
    fn val(&self) -> Option<&V> { self.z.val() }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperForking<V> for ReadZipperUntracked<'_, '_, V, A> {
    type ReadZipperT<'a> = ReadZipperUntracked<'a, 'a, V, A> where Self: 'a;
    fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> {
        let forked_zipper = self.z.fork_read_zipper();
        Self::ReadZipperT::new_forked_with_inner_zipper(forked_zipper)
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperSubtries<V, A> for ReadZipperUntracked<'_, '_, V, A> {
    fn make_map(&self) -> Option<PathMap<Self::V, A>> { self.z.make_map() }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperMoving for ReadZipperUntracked<'trie, '_, V, A> {
    fn at_root(&self) -> bool { self.z.at_root() }
    fn reset(&mut self) { self.z.reset() }
    #[inline]
    fn path(&self) -> &[u8] { self.z.path() }
    fn val_count(&self) -> usize { self.z.val_count() }
    fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) { self.z.descend_to(k) }
    fn descend_to_check<K: AsRef<[u8]>>(&mut self, k: K) -> bool { self.z.descend_to_check(k) }
    fn descend_to_existing<K: AsRef<[u8]>>(&mut self, k: K) -> usize { self.z.descend_to_existing(k) }
    fn descend_to_val<K: AsRef<[u8]>>(&mut self, k: K) -> usize { self.z.descend_to_val(k) }
    fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) }
    fn descend_to_existing_byte(&mut self, k: u8) -> bool { self.z.descend_to_existing_byte(k) }
    fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) }
    fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() }
    fn descend_until(&mut self) -> bool { self.z.descend_until() }
    fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() }
    fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() }
    fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) }
    fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() }
    fn ascend_until(&mut self) -> bool { self.z.ascend_until() }
    fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() }
    fn to_next_step(&mut self) -> bool { self.z.to_next_step() }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyValues<'trie, V> for ReadZipperUntracked<'trie, '_, V, A> {
    // SAFETY: NOTE(review): extends the returned borrow to 'trie; presumably
    // sound because an untracked zipper cannot outlive the trie it borrows — confirm
    fn get_val(&self) -> Option<&'trie V> { unsafe{ self.z.get_val() } }
}
// The untracked zipper needs no witness, so the unit type serves as one
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalValues<'trie, V> for ReadZipperUntracked<'trie, '_, V, A> {
    type WitnessT = ();
    fn witness<'w>(&self) -> Self::WitnessT { () }
    fn get_val_with_witness<'w>(&self, _witness: &'w Self::WitnessT) -> Option<&'w V> where 'trie: 'w { self.get_val() }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlySubtries<'a, V, A> for ReadZipperUntracked<'a, '_, V, A> {
    type TrieRefT = TrieRefBorrowed<'a, V, A>;
    fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> TrieRefBorrowed<'a, V, A> {
        let path = path.as_ref();
        TrieRefBorrowed::new_with_key_and_path_in(self.z.focus_parent_borrowed(), self.get_val(), self.z.node_key(), path, self.z.alloc.clone())
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperConcrete for ReadZipperUntracked<'_, '_, V, A> {
    #[inline]
    fn shared_node_id(&self) -> Option<u64> { self.z.shared_node_id() }
    #[inline]
    fn is_shared(&self) -> bool { self.z.is_shared() }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlyPriv<'a, V, A> for ReadZipperUntracked<'a, '_, V, A>{
    fn borrow_raw_parts<'z>(&'z self) -> (TaggedNodeRef<'z, V, A>, &'z [u8], Option<&'z V>) { self.z.borrow_raw_parts() }
    fn take_core(&mut self) -> Option<ReadZipperCore<'a, 'static, V, A>> {
        // Swap in an empty placeholder core so `self` remains structurally valid
        let mut temp_core = ReadZipperCore::new_with_node_and_path_internal_in(OwnedOrBorrowed::None, &[], 0, None, self.z.alloc.clone());
        core::mem::swap(&mut temp_core, &mut self.z);
        Some(temp_core.make_static_path())
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> zipper_priv::ZipperPriv for ReadZipperUntracked<'_, '_, V, A> {
    type V = V;
    type A = A;
    fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> { self.z.get_focus() }
    fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> { self.z.try_borrow_focus() }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperPathBuffer for ReadZipperUntracked<'trie, '_, V, A> {
    // SAFETY: same contract as the inner core's implementation
    unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8] { unsafe{ self.z.origin_path_assert_len(len) } }
    fn prepare_buffers(&mut self) { self.z.prepare_buffers() }
    fn reserve_buffers(&mut self, path_len: usize, stack_depth: usize) { self.z.reserve_buffers(path_len, stack_depth) }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperIteration for ReadZipperUntracked<'trie, '_, V, A> {
    fn to_next_val(&mut self) -> bool { self.z.to_next_val() }
    fn descend_first_k_path(&mut self, k: usize) -> bool { self.z.descend_first_k_path(k) }
    fn to_next_k_path(&mut self, k: usize) -> bool { self.z.to_next_k_path(k) }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyIteration<'trie, V> for ReadZipperUntracked<'trie, '_, V, A> {
    // SAFETY: NOTE(review): same lifetime-extension rationale as `get_val` above — confirm
    fn to_next_get_val(&mut self) -> Option<&'trie V> { unsafe{ self.z.to_next_get_val() } }
}
// Default methods suffice; they build on the ()-witness value access above
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalIteration<'trie, V> for ReadZipperUntracked<'trie, '_, V, A> { }
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperAbsolutePath for ReadZipperUntracked<'trie, '_, V, A> {
    fn origin_path(&self) -> &[u8] { self.z.origin_path() }
    fn root_prefix_path(&self) -> &[u8] { self.z.root_prefix_path() }
}
impl<'a, 'path, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ReadZipperUntracked<'a, 'path, V, A> {
    /// Makes a new untracked zipper rooted at `root_node`, borrowing `path`
    pub(crate) fn new_with_node_and_path_in(root_node: &'a TrieNodeODRc<V, A>, path: &'path [u8], root_prefix_len: usize, root_key_start: usize, root_val: Option<&'a V>, alloc: A) -> Self {
        let core = ReadZipperCore::new_with_node_and_path_in(root_node, false, path, root_prefix_len, root_key_start, root_val, alloc);
        Self { z: core }
    }
    /// Makes a new untracked zipper from a borrowed root node, for internal use
    pub(crate) fn new_with_node_and_path_internal_in(root_node: &'a TrieNodeODRc<V, A>, path: &'path [u8], root_key_start: usize, root_val: Option<&'a V>, alloc: A) -> Self {
        let core = ReadZipperCore::new_with_node_and_path_internal_in(OwnedOrBorrowed::Borrowed(root_node), path, root_key_start, root_val, alloc);
        Self { z: core }
    }
    /// Makes a new untracked zipper rooted at `root_node`, copying `path` internally
    pub(crate) fn new_with_node_and_cloned_path_in(root_node: &'a TrieNodeODRc<V, A>, path: &[u8], root_prefix_len: usize, root_key_start: usize, root_val: Option<&'a V>, alloc: A) -> Self {
        let core = ReadZipperCore::new_with_node_and_cloned_path_in(root_node, false, path, root_prefix_len, root_key_start, root_val, alloc);
        Self { z: core }
    }
    /// Wraps a core produced by forking another zipper
    pub(crate) fn new_forked_with_inner_zipper(core: ReadZipperCore<'a, 'path, V, A>) -> Self {
        ReadZipperUntracked{ z: core }
    }
}
impl<'a, 'path, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> std::iter::IntoIterator for ReadZipperUntracked<'a, 'path, V, A> {
    type Item = (Vec<u8>, &'a V);
    type IntoIter = ReadZipperIter<'a, 'path, V, A>;
    fn into_iter(self) -> Self::IntoIter {
        // Move the core out of `self` without invoking `self`'s drop glue.
        // SAFETY: `self` is wrapped in `ManuallyDrop`, so after `ptr::read`
        // duplicates the core, the original copy in `zip.z` is never dropped
        let zip = core::mem::ManuallyDrop::new(self);
        let core_z = unsafe { std::ptr::read(&zip.z) };
        ReadZipperIter {
            started: false,
            zipper: Some(core_z),
        }
    }
}
pub struct ReadZipperOwned<V: Clone + Send + Sync + 'static, A: Allocator + 'static = GlobalAlloc> {
map: MaybeDangling<Box<PathMap<V, A>>>,
z: Box<ReadZipperCore<'static, 'static, V, A>>,
}
impl<V: 'static + Clone + Send + Sync + Unpin, A: Allocator> Clone for ReadZipperOwned<V, A> {
fn clone(&self) -> Self {
let new_map = (**self.map).clone();
Self::new_with_map(new_map, self.root_prefix_path())
}
}
impl<V: 'static + Clone + Send + Sync + Unpin, A: Allocator> ReadZipperOwned<V, A> {
pub(crate) fn new_with_map<K: AsRef<[u8]>>(map: PathMap<V, A>, path: K) -> Self {
map.ensure_root();
let alloc = map.alloc.clone();
let path = path.as_ref();
let map = MaybeDangling::new(Box::new(map));
let root_ref = unsafe{ &*(*map).root.get() }.as_ref().unwrap();
let root_val = Option::as_ref( unsafe{ &*(*map).root_val.get() } );
let core = ReadZipperCore::new_with_node_and_cloned_path_in(root_ref, true, path, path.len(), 0, root_val, alloc);
Self { map, z: Box::new(core) }
}
pub fn into_map(self) -> PathMap<V, A> {
drop(self.z);
let map = MaybeDangling::into_inner(self.map);
*map
}
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> Zipper for ReadZipperOwned<V, A> {
#[inline]
fn path_exists(&self) -> bool { self.z.path_exists() }
fn is_val(&self) -> bool { self.z.is_val() }
fn child_count(&self) -> usize { self.z.child_count() }
fn child_mask(&self) -> ByteMask { self.z.child_mask() }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperValues<V> for ReadZipperOwned<V, A> {
fn val(&self) -> Option<&V> { unsafe{ self.z.get_val() } }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperForking<V> for ReadZipperOwned<V, A> {
type ReadZipperT<'a> = ReadZipperUntracked<'a, 'a, V, A> where Self: 'a;
fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> {
let forked_zipper = self.z.fork_read_zipper();
Self::ReadZipperT::new_forked_with_inner_zipper(forked_zipper)
}
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperSubtries<V, A> for ReadZipperOwned<V, A> {
fn make_map(&self) -> Option<PathMap<Self::V, A>> { self.z.make_map() }
}
/// Movement for the owned read zipper: every method is pure delegation to the
/// inner `ReadZipperCore`, which holds all cursor state.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperMoving for ReadZipperOwned<V, A> {
fn at_root(&self) -> bool { self.z.at_root() }
fn reset(&mut self) { self.z.reset() }
#[inline]
fn path(&self) -> &[u8] { self.z.path() }
fn val_count(&self) -> usize { self.z.val_count() }
fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) { self.z.descend_to(k) }
fn descend_to_check<K: AsRef<[u8]>>(&mut self, k: K) -> bool { self.z.descend_to_check(k) }
fn descend_to_existing<K: AsRef<[u8]>>(&mut self, k: K) -> usize { self.z.descend_to_existing(k) }
fn descend_to_val<K: AsRef<[u8]>>(&mut self, k: K) -> usize { self.z.descend_to_val(k) }
fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) }
fn descend_to_existing_byte(&mut self, k: u8) -> bool { self.z.descend_to_existing_byte(k) }
fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) }
fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() }
fn descend_until(&mut self) -> bool { self.z.descend_until() }
fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() }
fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() }
fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) }
fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() }
fn ascend_until(&mut self) -> bool { self.z.ascend_until() }
fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() }
fn to_next_step(&mut self) -> bool { self.z.to_next_step() }
}
/// Witness-based value access for the owned read zipper; delegates to the core.
/// The witness pins the root node so returned references stay valid for `'w`.
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalValues<'trie, V> for ReadZipperOwned<V, A> {
type WitnessT = ReadZipperWitness<V, A>;
fn witness<'w>(&self) -> ReadZipperWitness<V, A> { self.z.witness() }
fn get_val_with_witness<'w>(&self, witness: &'w ReadZipperWitness<V, A>) -> Option<&'w V> where 'trie: 'w { self.z.get_val_with_witness(witness) }
}
/// Creation of owned trie references rooted at a path below the zipper's focus.
impl<'a, V: Clone + Send + Sync + Unpin, A: Allocator> ZipperReadOnlySubtries<'a, V, A> for ReadZipperOwned<V, A> where Self: 'a {
type TrieRefT = TrieRefOwned<V, A>;
fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> TrieRefOwned<V, A> {
let path = path.as_ref();
// Builds the ref from the focus node's parent plus the residual node key, so the
// ref can address positions that fall inside a node's multi-byte key.
TrieRefOwned::new_with_key_and_path_in(self.z.focus_parent(), self.val(), self.z.node_key(), path, self.z.alloc.clone())
}
}
/// Node-identity queries for the owned read zipper; delegates to the core.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperConcrete for ReadZipperOwned<V, A> {
#[inline]
fn shared_node_id(&self) -> Option<u64> { self.z.shared_node_id() }
#[inline]
fn is_shared(&self) -> bool { self.z.is_shared() }
}
/// Private plumbing for the owned read zipper.
impl<'a, V: Clone + Send + Sync + Unpin, A: Allocator> ZipperReadOnlyPriv<'a, V, A> for ReadZipperOwned<V, A> where Self: 'a {
fn borrow_raw_parts<'z>(&'z self) -> (TaggedNodeRef<'z, V, A>, &'z [u8], Option<&'z V>) { self.z.borrow_raw_parts() }
fn take_core(&mut self) -> Option<ReadZipperCore<'a, 'static, V, A>> {
// Swap a throw-away empty core into `self.z` so we can move the real core out
// of `&mut self` without leaving it uninitialized.
let mut temp_core = ReadZipperCore::new_with_node_and_path_internal_in(OwnedOrBorrowed::None, &[], 0, None, self.z.alloc.clone());
core::mem::swap(&mut temp_core, &mut self.z);
// Convert to a 'static path so the extracted core no longer borrows the path slice.
Some(temp_core.make_static_path())
}
}
/// Private focus-node access for the owned read zipper; delegates to the core.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> zipper_priv::ZipperPriv for ReadZipperOwned<V, A> {
type V = V;
type A = A;
fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> { self.z.get_focus() }
fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> { self.z.try_borrow_focus() }
}
/// Path-buffer management for the owned read zipper; delegates to the core.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperPathBuffer for ReadZipperOwned<V, A> {
// SAFETY: forwards the caller's obligation to the core method of the same name.
unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8] { unsafe{ self.z.origin_path_assert_len(len) } }
fn prepare_buffers(&mut self) { self.z.prepare_buffers() }
fn reserve_buffers(&mut self, path_len: usize, stack_depth: usize) { self.z.reserve_buffers(path_len, stack_depth) }
}
/// Depth-first iteration for the owned read zipper; delegates to the core.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperIteration for ReadZipperOwned<V, A> {
fn to_next_val(&mut self) -> bool { self.z.to_next_val() }
fn descend_first_k_path(&mut self, k: usize) -> bool { self.z.descend_first_k_path(k) }
fn to_next_k_path(&mut self, k: usize) -> bool { self.z.to_next_k_path(k) }
}
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalIteration<'trie, V> for ReadZipperOwned<V, A> { }
/// Absolute-path access for the owned read zipper; delegates to the core.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperAbsolutePath for ReadZipperOwned<V, A> {
fn origin_path(&self) -> &[u8] { self.z.origin_path() }
fn root_prefix_path(&self) -> &[u8] { self.z.root_prefix_path() }
}
/// Initial capacity for the ancestor stack; tuned guess, not a hard limit.
pub(crate) const EXPECTED_DEPTH: usize = 16;
/// Initial capacity for the path prefix buffer; tuned guess, not a hard limit.
pub(crate) const EXPECTED_PATH_LEN: usize = 64;
pub(crate) mod read_zipper_core {
use crate::trie_node::*;
use crate::PathMap;
use crate::zipper::*;
/// The core state machine behind every read zipper: a focus node, a byte-path
/// buffer, and a stack of ancestor nodes for cheap ascent.
pub struct ReadZipperCore<'a, 'path, V: Clone + Send + Sync, A: Allocator> {
// The absolute path of the zipper's root (borrowed slice, or just a length once copied into `prefix_buf`).
origin_path: SliceOrLen<'path>,
// Offset into the path where the focus/root node's own key begins.
root_key_start: usize,
// Offset where the root node's parent key begins; `usize::MAX` when there is no parent key.
root_parent_key_start: usize,
// Value stored exactly at the zipper's root, if any.
root_val: Option<&'a V>,
// The node the zipper is rooted at; owned when the zipper must keep the trie alive itself.
root_node: OwnedOrBorrowed<'a, TrieNodeODRc<V, A>>,
// Node currently containing the focus (wrapped so Miri can track provenance; see `miri_wrapper`).
focus_node: MiriWrapper<TaggedNodeRef<'a, V, A>>,
// Cached iteration token for `focus_node`; NODE_ITER_INVALID when stale.
focus_iter_token: u128,
// Absolute path bytes: origin path followed by the relative path. Empty until buffers are prepared.
prefix_buf: Vec<u8>,
// Stack of (node, iter token, path offset where that node's key ends) for each ancestor of the focus.
ancestors: Vec<(TaggedNodeRef<'a, V, A>, u128, usize)>,
pub(crate) alloc: A,
}
/// A value that is either held by-value, borrowed for `'a`, or absent.
/// Used for the zipper's root node, which may or may not need to be kept alive.
#[derive(Clone, Debug)]
pub enum OwnedOrBorrowed<'a, T> {
Owned(T),
Borrowed(&'a T),
None,
}
impl<'a, T> From<Option<&'a T>> for OwnedOrBorrowed<'a, T> {
    /// Lifts an optional borrow into the enum: `Some` becomes `Borrowed`,
    /// `None` stays `None`. Never produces `Owned`.
    fn from(opt: Option<&'a T>) -> Self {
        opt.map_or(Self::None, Self::Borrowed)
    }
}
impl<'a, T> OwnedOrBorrowed<'a, T> {
    /// Returns a reference to the contained value, whether owned or borrowed.
    ///
    /// Panics if `self` is `None`.
    #[inline]
    pub fn as_ref(&self) -> &T {
        match self {
            // `t` already binds as `&T` via match ergonomics; the original's `&t`
            // produced `&&T` and relied on deref coercion (clippy: needless_borrow).
            Self::Owned(t) => t,
            Self::Borrowed(t) => t,
            Self::None => panic!(),
        }
    }
    /// Returns the borrowed reference with its full `'a` lifetime.
    ///
    /// Panics on `Owned` or `None`: only a `Borrowed` value can outlive `self`.
    pub fn as_borrowed_ref(&self) -> &'a T {
        match self {
            Self::Borrowed(t) => t,
            Self::Owned(_) => panic!(),
            Self::None => panic!(),
        }
    }
    /// Returns `Some` only when the value is held by-value (`Owned`).
    pub fn get_owned_ref(&self) -> Option<&T> {
        match self {
            Self::Owned(t) => Some(t),
            Self::Borrowed(_) => None,
            Self::None => None,
        }
    }
    /// Reports whether the value is held by-value (`Owned`).
    pub fn is_owned(&self) -> bool {
        matches!(self, Self::Owned(_))
    }
}
// Under Miri, box the focus node ref so its provenance is tracked through a real
// heap allocation; in normal builds use the zero-cost transparent wrapper below.
#[cfg(miri)]
type MiriWrapper<T> = Box<T>;
#[cfg(not(miri))]
use miri_wrapper::MiriWrapper;
mod miri_wrapper {
    use std::ops::{Deref, DerefMut};
    /// Zero-cost transparent wrapper that mirrors the API shape of the
    /// `Box`-based wrapper used under Miri (see the `#[cfg(miri)]` alias).
    #[derive(Clone, Debug)]
    pub struct MiriWrapper<T>(T);
    impl<T> MiriWrapper<T> {
        /// Wraps `t` without any allocation or transformation.
        pub fn new(t: T) -> Self {
            MiriWrapper(t)
        }
    }
    impl<T> Deref for MiriWrapper<T> {
        type Target = T;
        #[inline]
        fn deref(&self) -> &Self::Target {
            let MiriWrapper(inner) = self;
            inner
        }
    }
    impl<T> DerefMut for MiriWrapper<T> {
        #[inline]
        fn deref_mut(&mut self) -> &mut Self::Target {
            let MiriWrapper(inner) = self;
            inner
        }
    }
}
// NOTE: the original carried a redundant `where V: Clone` clause duplicating the
// inline `V: Clone` bound; it has been removed with no change in applicability.
impl<V: Clone + Send + Sync, A: Allocator> Clone for ReadZipperCore<'_, '_, V, A> {
    /// Clones the zipper at its current focus position.
    ///
    /// `focus_iter_token` is deliberately reset to `NODE_ITER_INVALID` rather than
    /// copied: tokens cache per-node iteration state, and the clone starts with no
    /// cached iteration position (it is recomputed lazily on the next iteration call).
    fn clone(&self) -> Self {
        Self {
            origin_path: self.origin_path.clone(),
            root_key_start: self.root_key_start,
            root_parent_key_start: self.root_parent_key_start,
            root_val: self.root_val,
            root_node: self.root_node.clone(),
            focus_node: self.focus_node.clone(),
            focus_iter_token: NODE_ITER_INVALID,
            prefix_buf: self.prefix_buf.clone(),
            ancestors: self.ancestors.clone(),
            alloc: self.alloc.clone(),
        }
    }
}
/// Basic focus queries for the core zipper.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> Zipper for ReadZipperCore<'_, '_, V, A> {
#[inline]
fn path_exists(&self) -> bool {
// A non-empty node key means the focus sits inside `focus_node`'s key space,
// so existence must be checked; an empty key means we sit exactly on a node.
let key = self.node_key();
if key.len() > 0 {
self.focus_node.node_contains_partial_key(key)
} else {
true
}
}
fn is_val(&self) -> bool {
self.is_val_internal()
}
fn child_count(&self) -> usize {
debug_assert!(self.is_regularized());
self.focus_node.count_branches(self.node_key())
}
fn child_mask(&self) -> ByteMask {
debug_assert!(self.is_regularized());
self.focus_node.node_branches_mask(self.node_key())
}
}
/// Value access for the core zipper.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperValues<V> for ReadZipperCore<'_, '_, V, A> {
// SAFETY(review): `get_val` returns a `'a`-lifetime reference; shortening it to a
// borrow of `self` here is presumed sound — confirm against `get_val`'s contract.
fn val(&self) -> Option<&V> { unsafe{ self.get_val() } }
}
/// Forking: builds a new core zipper rooted at the current focus position.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperForking<V> for ReadZipperCore<'_, '_, V, A> {
type ReadZipperT<'a> = ReadZipperCore<'a, 'a, V, A> where Self: 'a;
fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> {
let new_root_val = self.val();
let new_root_path = self.origin_path();
// The fork's root key starts where the current focus node's residual key begins.
let new_root_key_start = new_root_path.len() - self.node_key().len();
Self::ReadZipperT::new_with_node_and_path_internal_in(OwnedOrBorrowed::Borrowed(self.focus_parent()), new_root_path, new_root_key_start, new_root_val, self.alloc.clone())
}
}
/// Subtrie extraction: materializes the subtrie below the focus as a `PathMap`.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperSubtries<V, A> for ReadZipperCore<'_, '_, V, A> {
fn make_map(&self) -> Option<PathMap<Self::V, A>> {
// Whether the focus value is carried into the new map's root is feature-gated.
#[cfg(not(feature = "graft_root_vals"))]
let root_val = None;
#[cfg(feature = "graft_root_vals")]
let root_val = self.val().cloned();
let root_node = self.get_focus().into_option();
// Only build a map when there is something (node or root value) to put in it.
if root_node.is_some() || root_val.is_some() {
Some(PathMap::new_with_root_in(root_node, root_val, self.alloc.clone()))
} else {
None
}
}
}
/// Movement for the core zipper. Invariant vocabulary used below:
/// - "node key": the path bytes between the focus node's start and the focus position;
/// - "regularized": whenever the focus sits exactly on a child node boundary, the
///   zipper has descended into that child (checked by `is_regularized`).
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperMoving for ReadZipperCore<'trie, '_, V, A> {
fn at_root(&self) -> bool {
// `prefix_buf` holds origin path + relative path; nothing beyond the origin means root.
// (An unprepared, empty `prefix_buf` also satisfies this.)
self.prefix_buf.len() <= self.origin_path.len()
}
fn reset(&mut self) {
// The bottom-most ancestor entry is the root's node; restore it and drop the rest.
self.ancestors.truncate(1);
match self.ancestors.pop() {
Some((node, _tok, _prefix_len)) => {
*self.focus_node = node;
self.focus_iter_token = NODE_ITER_INVALID;
},
None => {}
}
self.prefix_buf.truncate(self.origin_path.len());
}
#[inline]
fn path(&self) -> &[u8] {
// Relative path = everything past the origin prefix; empty buffer means never descended.
if self.prefix_buf.len() > 0 {
&self.prefix_buf[self.origin_path.len()..]
} else {
&[]
}
}
fn val_count(&self) -> usize {
// Count the value at the focus itself, then everything strictly below it.
let root_val = self.is_val() as usize;
if self.node_key().len() == 0 {
val_count_below_root(*self.focus_node) + root_val
} else {
let focus = self.get_focus();
if focus.is_none() {
root_val
} else {
val_count_below_root(focus.as_tagged()) + root_val
}
}
}
fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) {
let k = k.as_ref();
if k.len() == 0 {
return }
self.prepare_buffers();
debug_assert!(self.is_regularized());
self.descend_to_internal(k);
}
fn descend_to_check<K: AsRef<[u8]>>(&mut self, k: K) -> bool {
let k = k.as_ref();
if k.len() == 0 {
return self.path_exists() }
self.prepare_buffers();
debug_assert!(self.is_regularized());
// `descend_to_internal` returns the residual key that wasn't consumed by node hops.
let (borrowed_self, key) = self.descend_to_internal(k);
if key.len() == 0 {
debug_assert!(self.path_exists());
true
} else {
borrowed_self.focus_node.node_contains_partial_key(key)
}
}
#[inline]
fn descend_to_byte(&mut self, k: u8) {
self.prepare_buffers();
debug_assert!(self.is_regularized());
self.prefix_buf.push(k);
self.focus_iter_token = NODE_ITER_INVALID;
// Re-regularize: if the single byte landed exactly on a child boundary, enter the child.
if let Some((_consumed_byte_cnt, next_node)) = self.focus_node.node_get_child(self.node_key()) {
let next_node = next_node.as_tagged();
self.ancestors.push((*self.focus_node, self.focus_iter_token, self.prefix_buf.len()));
*self.focus_node = next_node;
}
}
#[inline]
fn descend_to_existing_byte(&mut self, k: u8) -> bool {
self.prepare_buffers();
debug_assert!(self.is_regularized());
// Optimistically push the byte; pop it back off below if the path doesn't exist.
self.prefix_buf.push(k);
let node_key = self.node_key();
if let Some((_consumed_byte_cnt, next_node)) = self.focus_node.node_get_child(node_key) {
self.focus_iter_token = NODE_ITER_INVALID;
let next_node = next_node.as_tagged();
self.ancestors.push((*self.focus_node, self.focus_iter_token, self.prefix_buf.len()));
*self.focus_node = next_node;
return true;
}
if self.focus_node.node_contains_partial_key(node_key) {
true
} else {
self.prefix_buf.pop();
false
}
}
fn descend_indexed_byte(&mut self, child_idx: usize) -> bool {
self.prepare_buffers();
debug_assert!(self.is_regularized());
// `nth_child_from_key` picks the child_idx-th branch byte below the current key.
match self.focus_node.nth_child_from_key(self.node_key(), child_idx) {
(Some(prefix), Some(child_node)) => {
// Byte lands on a child node boundary: descend into the child.
self.prefix_buf.push(prefix);
self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
*self.focus_node = child_node;
self.focus_iter_token = NODE_ITER_INVALID;
true
},
(Some(prefix), None) => {
// Byte stays inside the current node's key space.
self.prefix_buf.push(prefix);
self.focus_iter_token = NODE_ITER_INVALID;
true
},
(None, _) => false
}
}
fn descend_first_byte(&mut self) -> bool {
self.prepare_buffers();
debug_assert!(self.is_regularized());
// Seed the iterator at the current position and take the first item at-or-below it.
let cur_tok = self.focus_node.iter_token_for_path(self.node_key());
self.focus_iter_token = cur_tok;
let (new_tok, key_bytes, child_node, _value) = self.focus_node.next_items(self.focus_iter_token);
if new_tok != NODE_ITER_FINISHED {
let byte_idx = self.node_key().len();
if byte_idx >= key_bytes.len() {
// The iterator item doesn't extend past the current key — nothing below us.
debug_assert!(self.is_regularized());
return false; }
self.focus_iter_token = new_tok;
self.prefix_buf.push(key_bytes[byte_idx]);
if key_bytes.len() == byte_idx+1 {
// Consumed the item's whole key: descend into its child node if there is one.
match child_node {
None => {},
Some(rec) => {
self.ancestors.push((*self.focus_node.clone(), new_tok, self.prefix_buf.len()));
*self.focus_node = rec.as_tagged();
self.focus_iter_token = self.focus_node.new_iter_token();
},
}
}
debug_assert!(self.is_regularized());
true
} else {
self.focus_iter_token = new_tok;
debug_assert!(self.is_regularized());
false
}
}
fn descend_until(&mut self) -> bool {
debug_assert!(self.is_regularized());
// Follow the unique chain of single children, stopping at a value or a branch point.
let mut moved = false;
while self.child_count() == 1 {
moved = true;
self.descend_first();
if self.is_val_internal() {
break;
}
}
moved
}
fn descend_to_existing<K: AsRef<[u8]>>(&mut self, k: K) -> usize {
// Descends as far along `k` as paths exist; returns the number of bytes descended.
let mut k = k.as_ref();
if k.len() == 0 {
return 0 }
self.prepare_buffers();
debug_assert!(self.is_regularized());
let original_path_len = self.prefix_buf.len();
let mut key_start = self.node_key_start();
// Bail early if the current residual key is already off-path.
if key_start < self.prefix_buf.len() && !self.focus_node.node_contains_partial_key(&self.prefix_buf[key_start..]) {
return 0
}
// Feed the key in chunks so one chunk always spans at least a whole node key,
// letting the `new_key_start == key_start` check detect "no further progress".
const CHUNK_SIZE: usize = 64;
debug_assert!(CHUNK_SIZE >= MAX_NODE_KEY_BYTES);
while k.len() > 0 {
let (chunk, remaining) = if k.len() > CHUNK_SIZE {
(&k[..CHUNK_SIZE], &k[CHUNK_SIZE..])
} else {
(k, &[][..])
};
let _ = self.descend_to_internal(chunk);
let new_key_start = self.node_key_start();
if new_key_start == key_start {
break;
}
key_start = new_key_start;
k = remaining;
}
// Trim back to the deepest byte that actually exists within the final node.
let node_key = &self.prefix_buf[key_start..];
let overlap = if node_key.len() > 0 {
self.focus_node.node_key_overlap(node_key)
} else {
0
};
self.prefix_buf.truncate(key_start+overlap);
debug_assert!(self.is_regularized());
self.prefix_buf.len() - original_path_len
}
fn to_next_sibling_byte(&mut self) -> bool {
self.prepare_buffers();
if self.prefix_buf.len() == 0 {
return false
}
debug_assert!(self.is_regularized());
// Siblings live in the parent's key space, so step back out of a child node first.
self.deregularize();
if self.focus_iter_token == NODE_ITER_INVALID {
let cur_tok = self.focus_node.iter_token_for_path(self.node_key());
self.focus_iter_token = cur_tok;
}
if self.focus_iter_token == NODE_ITER_FINISHED {
self.regularize();
return false
}
// Scan forward through the node's items for one that shares our prefix but has
// a strictly greater final byte.
let (mut new_tok, mut key_bytes, mut child_node, mut _value) = self.focus_node.next_items(self.focus_iter_token);
while new_tok != NODE_ITER_FINISHED {
let node_key = self.node_key();
if node_key.len() == 0 {
self.focus_iter_token = NODE_ITER_INVALID;
return false;
}
let fixed_len = node_key.len() - 1;
if fixed_len >= key_bytes.len() || key_bytes[..fixed_len] != node_key[..fixed_len] {
// Left the shared-prefix region: no more siblings.
self.regularize();
return false;
}
if key_bytes[fixed_len] > node_key[fixed_len] {
*self.prefix_buf.last_mut().unwrap() = key_bytes[node_key.len()-1];
self.focus_iter_token = new_tok;
// NOTE(review): `descend_first_byte` tests `key_bytes.len() == byte_idx+1`
// (i.e. against the node-key length) before descending; confirm `== 1` here
// is intended and not meant to be `== node_key.len()`.
if key_bytes.len() == 1 {
match child_node {
None => {},
Some(rec) => {
self.ancestors.push((*self.focus_node.clone(), new_tok, self.prefix_buf.len()));
*self.focus_node = rec.as_tagged();
self.focus_iter_token = NODE_ITER_INVALID
},
}
}
debug_assert!(self.is_regularized());
return true
}
(new_tok, key_bytes, child_node, _value) = self.focus_node.next_items(new_tok);
}
self.focus_iter_token = NODE_ITER_FINISHED;
self.regularize();
false
}
fn to_prev_sibling_byte(&mut self) -> bool {
self.to_sibling(false)
}
fn ascend(&mut self, mut steps: usize) -> bool {
debug_assert!(self.is_regularized());
while steps > 0 {
// No bytes left within the current node: pop to the parent node first.
if self.excess_key_len() == 0 {
match self.ancestors.pop() {
Some((node, iter_tok, _prefix_offset)) => {
*self.focus_node = node;
self.focus_iter_token = iter_tok;
},
None => {
// Hit the zipper root before covering all requested steps.
debug_assert!(self.is_regularized());
return false
}
};
}
// Consume as many steps as this node's key allows in one truncate.
let cur_jump = steps.min(self.excess_key_len());
self.prefix_buf.truncate(self.prefix_buf.len() - cur_jump);
steps -= cur_jump;
}
debug_assert!(self.is_regularized());
true
}
fn ascend_byte(&mut self) -> bool {
debug_assert!(self.is_regularized());
// Single-step version of `ascend`: pop the parent node if needed, then drop one byte.
if self.excess_key_len() == 0 {
match self.ancestors.pop() {
Some((node, iter_tok, _prefix_offset)) => {
*self.focus_node = node;
self.focus_iter_token = iter_tok;
},
None => {
debug_assert!(self.is_regularized());
return false
}
};
}
self.prefix_buf.pop();
debug_assert!(self.is_regularized());
true
}
fn ascend_until(&mut self) -> bool {
debug_assert!(self.is_regularized());
if self.at_root() {
return false;
}
// Climb until reaching a branch point (>1 children), a value, or the root.
loop {
if self.node_key().len() == 0 {
self.ascend_across_nodes();
}
self.ascend_within_node();
if self.child_count() > 1 || self.is_val() || self.at_root() {
return true;
}
}
}
fn ascend_until_branch(&mut self) -> bool {
debug_assert!(self.is_regularized());
if self.at_root() {
return false;
}
// Same as `ascend_until`, but values do not stop the climb — only branches or root.
loop {
if self.node_key().len() == 0 {
self.ascend_across_nodes();
}
self.ascend_within_node();
if self.child_count() > 1 || self.at_root() {
return true;
}
}
}
}
/// Private focus-node access for the core zipper.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> zipper_priv::ZipperPriv for ReadZipperCore<'_, '_, V, A> {
type V = V;
type A = A;
fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> {
// With an empty node key the focus sits exactly on `focus_node`, so the lookup
// must go through the parent (or the root when there is no parent).
let node_key = self.node_key();
let (focus_node, node_key) = if node_key.len() == 0 {
match self.ancestors.last() {
Some((focus_node, _iter_tok, _prefix_offset)) => (focus_node, self.parent_key()),
None => {
return AbstractNodeRef::BorrowedRc(self.root_node.as_ref())
}
}
} else {
(&*self.focus_node, node_key)
};
focus_node.get_node_at_key(node_key)
}
fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> {
// Like `get_focus`, but only succeeds when the focus is exactly a child-node
// boundary, so the `TrieNodeODRc` handle itself can be borrowed.
let node_key = self.node_key();
let (focus_node, node_key) = if node_key.len() == 0 {
let parent_key = self.parent_key();
if parent_key.len() == 0 {
return Some(self.root_node.as_ref())
}
let parent_node = match self.ancestors.last() {
Some((focus_node, _iter_tok, _prefix_offset)) => {
*focus_node
},
None => {
self.root_node.as_ref().as_tagged()
}
};
(parent_node, parent_key)
} else {
(*self.focus_node, node_key)
};
match focus_node.node_get_child(node_key) {
Some((consumed_bytes, child_node)) => {
debug_assert_eq!(consumed_bytes, node_key.len());
Some(child_node)
},
None => None
}
}
}
/// Path-buffer management. `prefix_buf` starts unallocated and lazily copies the
/// origin path on first use; capacity == 0 means "still borrowing `origin_path`".
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperPathBuffer for ReadZipperCore<'trie, '_, V, A> {
// SAFETY: caller must guarantee the first `len` bytes of the backing buffer are
// initialized path bytes; the asserts only bound `len` by capacity/length.
unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8] {
if self.prefix_buf.capacity() > 0 {
assert!(len <= self.prefix_buf.capacity());
unsafe{ core::slice::from_raw_parts(self.prefix_buf.as_ptr(), len) }
} else {
assert!(len <= self.origin_path.len());
unsafe{ &self.origin_path.as_slice_unchecked() }
}
}
#[inline(always)]
fn prepare_buffers(&mut self) {
// Hot-path guard: allocate only on first use, via the #[cold] slow path below.
if self.prefix_buf.capacity() == 0 {
self.reserve_buffers(EXPECTED_PATH_LEN, EXPECTED_DEPTH)
}
}
#[cold]
fn reserve_buffers(&mut self, path_len: usize, stack_depth: usize) {
let path_len = path_len.max(self.origin_path.len());
if self.prefix_buf.capacity() < path_len {
let was_unallocated = self.prefix_buf.capacity() == 0;
self.prefix_buf.reserve(path_len.saturating_sub(self.prefix_buf.len()));
if was_unallocated {
// First allocation: seed the buffer with the borrowed origin path.
self.prefix_buf.extend(unsafe{ self.origin_path.as_slice_unchecked() });
}
}
if self.ancestors.capacity() < stack_depth {
self.ancestors.reserve(stack_depth.saturating_sub(self.ancestors.len()));
}
}
}
/// Witness-based value access: the witness clones the owned root handle (if any)
/// so value references can outlive the zipper itself.
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalValues<'trie, V> for ReadZipperCore<'trie, '_, V, A> {
type WitnessT = ReadZipperWitness<V, A>;
fn witness<'w>(&self) -> Self::WitnessT {
// Only an Owned root needs pinning; a Borrowed root already outlives 'trie.
if self.root_node.is_owned() {
ReadZipperWitness(Some(self.root_node.as_ref().clone()))
} else {
ReadZipperWitness(None)
}
}
fn get_val_with_witness<'w>(&self, witness: &'w Self::WitnessT) -> Option<&'w V> where 'trie: 'w {
// The witness must have been created from this zipper's root.
assert_eq!(witness.0.as_ref(), self.root_node.get_owned_ref());
// Mirrors `get_val`'s lookup order: focus node, then parent, then root.
let key = self.node_key();
if key.len() > 0 {
self.focus_node.node_get_val(key)
} else {
if let Some((parent, _iter_tok, _prefix_offset)) = self.ancestors.last() {
parent.node_get_val(self.parent_key())
} else {
if self.root_val.is_some() {
self.root_val
} else {
// Owned-root case: read through the witness so the borrow lives for 'w.
witness.0.as_ref().unwrap().as_tagged().node_get_val(self.root_node_key())
}
}
}
}
}
/// Private plumbing for the core zipper.
impl<'a, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperReadOnlyPriv<'a, V, A> for ReadZipperCore<'a, '_, V, A> {
fn borrow_raw_parts(&self) -> (TaggedNodeRef<'_, V, A>, &[u8], Option<&V>) {
// The value slot is only populated when the focus is exactly on a node boundary.
let node_key = self.node_key();
if node_key.len() > 0 {
(*self.focus_node, node_key, None)
} else {
(*self.focus_node, &[], self.val())
}
}
fn take_core(&mut self) -> Option<ReadZipperCore<'a, 'static, V, A>> {
// Only wrapper types (e.g. ReadZipperOwned) can surrender a core; calling this
// on the core itself is a logic error.
unreachable!()
}
}
/// Node-identity queries for the core zipper.
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperConcrete for ReadZipperCore<'_, '_, V, A> {
#[inline]
fn shared_node_id(&self) -> Option<u64> {
read_zipper_shared_node_id(self)
}
#[inline]
fn is_shared(&self) -> bool {
// Sharing is a property of whole nodes; a focus inside a node's key is never shared.
let key = self.node_key();
if key.len() > 0 {
false
} else {
if let Some((parent, _iter_tok, _prefix_offset)) = self.ancestors.last() {
// Look the focus node up through its parent to reach the refcounted handle.
let (_key_len, focus_node) = parent.node_get_child(self.parent_key()).unwrap();
focus_node.refcount() > 1
} else {
false }
}
}
}
#[inline]
/// Shared implementation of `ZipperConcrete::shared_node_id` for read zippers:
/// a stable id is only reported when the focus sits exactly on a shared node
/// (no residual key, no value at the focus).
pub(crate) fn read_zipper_shared_node_id<'a, V: Clone + Send + Sync + 'a, A: Allocator + 'a, Z: Zipper + ZipperReadOnlyPriv<'a, V, A> + ZipperConcrete>(zipper: &Z) -> Option<u64> {
let (node, key, value) = zipper.borrow_raw_parts();
if !zipper.is_shared() || !key.is_empty() || value.is_some() {
return None
}
Some(node.shared_node_id())
}
/// Depth-first iteration for the core zipper.
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperIteration for ReadZipperCore<'trie, '_, V, A> {
fn to_next_val(&mut self) -> bool {
unsafe{ self.to_next_get_val() }.is_some()
}
fn descend_first_k_path(&mut self, k: usize) -> bool {
self.prepare_buffers();
debug_assert!(self.is_regularized());
// Seed the iterator at the current position, then walk to the first path k bytes below.
let cur_tok = self.focus_node.iter_token_for_path(self.node_key());
self.focus_iter_token = cur_tok;
self.k_path_internal(k, self.prefix_buf.len())
}
fn to_next_k_path(&mut self, k: usize) -> bool {
// The fixed base is k bytes above the current position (clamped to the origin).
let base_idx = if self.path_len() >= k {
self.prefix_buf.len() - k
} else {
self.origin_path.len()
};
debug_assert!(self.is_regularized());
self.deregularize();
self.k_path_internal(k, base_idx)
}
}
/// Absolute-path access. Capacity == 0 on `prefix_buf` means the buffer was never
/// prepared, so the borrowed origin path is still authoritative.
impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperAbsolutePath for ReadZipperCore<'trie, '_, V, A> {
fn origin_path(&self) -> &[u8] {
if self.prefix_buf.capacity() > 0 {
&self.prefix_buf
} else {
unsafe{ &self.origin_path.as_slice_unchecked() }
}
}
fn root_prefix_path(&self) -> &[u8] {
if self.prefix_buf.capacity() > 0 {
&self.prefix_buf[..self.origin_path.len()]
} else {
unsafe{ &self.origin_path.as_slice_unchecked() }
}
}
}
impl<'a, 'path, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ReadZipperCore<'a, 'path, V, A> {
/// Constructs a core zipper rooted at `root_node`, after first walking `path`
/// from `root_key_start` to find the deepest pre-existing node along it.
pub(crate) fn new_with_node_and_path_in(root_node: &'a TrieNodeODRc<V, A>, owned_root: bool, path: &'path [u8], root_prefix_len: usize, root_key_start: usize, root_val: Option<&'a V>, alloc: A) -> Self {
let (node, key, val) = node_along_path(root_node, &path[root_key_start..], root_val, false);
// `owned_root` decides whether this zipper must keep the root node alive itself.
let node = if owned_root {
OwnedOrBorrowed::Owned(node.clone())
} else {
OwnedOrBorrowed::Borrowed(node)
};
// `key` is the residual path that wasn't consumed by the walk.
let new_root_key_start = root_prefix_len - key.len();
Self::new_with_node_and_path_internal_in(node, path, new_root_key_start, val, alloc)
}
/// Low-level constructor shared by all the public construction paths. Buffers
/// start unallocated; the borrowed `path` stays authoritative until first use.
pub(crate) fn new_with_node_and_path_internal_in(root_node: OwnedOrBorrowed<'a, TrieNodeODRc<V, A>>, path: &'path [u8], mut root_key_start: usize, root_val: Option<&'a V>, alloc: A) -> Self {
let mut focus: TaggedNodeRef<'a, V, A> = match &root_node {
OwnedOrBorrowed::Owned(root_node) => {
// SAFETY(review): extends the tagged ref's lifetime from the local borrow to 'a;
// presumably sound because the Owned node lives inside this struct for 'a —
// confirm this relies on the node's heap contents not moving with the struct.
unsafe{ core::mem::transmute(root_node.as_tagged()) }
},
OwnedOrBorrowed::Borrowed(root_node) => {
root_node.as_tagged()
},
OwnedOrBorrowed::None => {
TaggedNodeRef::empty_node()
}
};
// usize::MAX marks "no parent key" (see root_node_key / parent_key).
let mut root_parent_key_start = usize::MAX;
if path.len() > root_key_start {
// If the residual root key lands exactly on a child, start focused in that child.
if let Some((consumed_byte_cnt, next_node)) = focus.node_get_child(&path[root_key_start..]) {
focus = next_node.as_tagged();
root_parent_key_start = root_key_start;
root_key_start += consumed_byte_cnt;
}
}
Self {
origin_path: SliceOrLen::from(path),
root_key_start,
root_parent_key_start,
root_val: root_val.map(|v| v.into()),
focus_node: MiriWrapper::new(focus),
root_node,
focus_iter_token: NODE_ITER_INVALID,
prefix_buf: vec![],
ancestors: vec![],
alloc,
}
}
/// Like `new_with_node_and_path_in`, but copies `path` into the zipper's own
/// buffer so the result does not borrow the path slice (`'path = 'static`).
pub(crate) fn new_with_node_and_cloned_path_in(root_node: &'a TrieNodeODRc<V, A>, owned_root: bool, path: &[u8], root_prefix_len: usize, root_key_start: usize, root_val: Option<&'a V>, alloc: A) -> Self {
let (node, key, val) = node_along_path(root_node, &path[root_key_start..], root_val, false);
let node = if owned_root {
OwnedOrBorrowed::Owned(node.clone())
} else {
OwnedOrBorrowed::Borrowed(node)
};
let new_root_key_start = root_prefix_len - key.len();
let mut new_zipper = ReadZipperCore::<'a, '_, V, A>::new_with_node_and_path_internal_in(node, path, new_root_key_start, val, alloc);
// Eagerly materialize the buffers from the path, then detach from the borrow.
new_zipper.prefix_buf = Vec::with_capacity(EXPECTED_PATH_LEN);
new_zipper.prefix_buf.extend(path);
new_zipper.origin_path = SliceOrLen::new_owned(path.len());
new_zipper.ancestors = Vec::with_capacity(EXPECTED_DEPTH);
new_zipper.make_static_path()
}
#[inline]
/// Converts the zipper into one whose path is self-contained (`'path = 'static`)
/// by ensuring the path lives in `prefix_buf` rather than a borrowed slice.
pub(crate) fn make_static_path(mut self) -> ReadZipperCore<'a, 'static, V, A> {
// Guarantees `prefix_buf` is allocated and seeded with the origin path.
self.prepare_buffers();
ReadZipperCore {
// Keep only the origin length; the bytes themselves are now in `prefix_buf`.
origin_path: SliceOrLen::new_owned(self.origin_path.len()),
root_key_start: self.root_key_start,
root_parent_key_start: self.root_parent_key_start,
root_val: self.root_val,
root_node: self.root_node,
focus_node: self.focus_node,
focus_iter_token: NODE_ITER_INVALID,
prefix_buf: self.prefix_buf,
ancestors: self.ancestors,
alloc: self.alloc
}
}
#[inline(always)]
/// Length of the relative path (bytes descended below the zipper's origin).
fn path_len(&self) -> usize {
self.prefix_buf.len() - self.origin_path.len()
}
#[inline]
/// Restores the regularized invariant: if the focus sits exactly on a child-node
/// boundary, descend into that child so `focus_node` is the deepest node.
pub(crate) fn regularize(&mut self) {
debug_assert!(self.prefix_buf.len() >= self.node_key_start()); if let Some((_consumed_byte_cnt, next_node)) = self.focus_node.node_get_child(self.node_key()) {
self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
*self.focus_node = next_node.as_tagged();
self.focus_iter_token = NODE_ITER_INVALID;
}
}
#[inline]
/// Inverse of `regularize`: if the focus is at a node's entry point (empty node
/// key), pop back to the parent so sibling operations can work in its key space.
pub(crate) fn deregularize(&mut self) {
if self.prefix_buf.len() == self.node_key_start() {
self.ascend_across_nodes();
}
}
#[inline]
/// Debug check for the regularized invariant: the residual node key must not
/// itself resolve to a child node (otherwise we should have descended into it).
fn is_regularized(&self) -> bool {
let key_start = self.node_key_start();
if self.prefix_buf.len() > key_start {
self.focus_node.node_get_child(self.node_key()).is_none()
} else {
true
}
}
/// Returns the value at the focus, if any. Lookup order: inside the focus node's
/// key space; else through the parent node; else the zipper's root value.
///
/// # Safety
/// Returns a `'a` reference; caller must ensure the backing trie outlives the use
/// (e.g. via a witness or by owning the root).
pub(crate) unsafe fn get_val(&self) -> Option<&'a V> {
let key = self.node_key();
if key.len() > 0 {
self.focus_node.node_get_val(key)
} else {
if let Some((parent, _iter_tok, _prefix_offset)) = self.ancestors.last() {
parent.node_get_val(self.parent_key())
} else {
self.root_val
}
}
}
/// Advances the zipper depth-first to the next value below/after the current
/// position, returning it, or `None` when iteration is exhausted (the zipper is
/// then restored to its origin path).
///
/// # Safety
/// Same lifetime contract as `get_val`: the returned `'a` reference must not
/// outlive the backing trie.
pub(crate) unsafe fn to_next_get_val(&mut self) -> Option<&'a V> {
self.prepare_buffers();
loop {
// Lazily (re)seed the iterator token for the current position.
if self.focus_iter_token == NODE_ITER_INVALID {
let cur_tok = self.focus_node.iter_token_for_path(self.node_key());
self.focus_iter_token = cur_tok;
}
let (new_tok, key_bytes, child_node, value) = if self.focus_iter_token != NODE_ITER_FINISHED {
self.focus_node.next_items(self.focus_iter_token)
} else {
(NODE_ITER_FINISHED, &[][..] as &[u8], None, None)
};
if new_tok != NODE_ITER_FINISHED {
self.focus_iter_token = new_tok;
let key_start = self.node_key_start();
let origin_path_len = self.origin_path.len();
if key_start < origin_path_len {
// The node key starts above the origin: the iterated item must still match
// the origin's bytes, otherwise we've walked out of the zipper's subtree.
debug_assert_eq!(self.ancestors.len(), 0);
let unmodifiable_len = origin_path_len - key_start;
let unmodifiable_subkey = &self.prefix_buf[key_start..origin_path_len];
if unmodifiable_len > key_bytes.len() || &key_bytes[..unmodifiable_len] != unmodifiable_subkey {
self.prefix_buf.truncate(origin_path_len);
return None
}
}
// Replace the residual key with this item's key.
self.prefix_buf.truncate(key_start);
self.prefix_buf.extend(key_bytes);
match child_node {
None => {},
Some(rec) => {
// Item ends at a child node: descend and restart its iteration.
self.ancestors.push((*self.focus_node.clone(), new_tok, self.prefix_buf.len()));
*self.focus_node = rec.as_tagged();
self.focus_iter_token = self.focus_node.new_iter_token();
},
}
match value {
Some(v) => return Some(v),
None => {}
}
} else {
// Current node exhausted: pop to the parent, or finish at the root.
if let Some((focus_node, iter_tok, prefix_offset)) = self.ancestors.pop() {
*self.focus_node = focus_node;
self.focus_iter_token = iter_tok;
self.prefix_buf.truncate(prefix_offset);
} else {
let new_len = self.origin_path.len();
self.focus_iter_token = NODE_ITER_INVALID;
self.prefix_buf.truncate(new_len);
return None
}
}
}
}
#[inline]
/// Returns the handle to the focus node's parent node (the root handle when the
/// focus node is the root).
pub(crate) fn focus_parent(&self) -> &TrieNodeODRc<V, A> {
let parent_key = self.parent_key();
if parent_key.len() == 0 {
return self.root_node.as_ref()
}
self.focus_parent_borrowed()
}
#[inline]
/// Like `focus_parent`, but returns the full-`'a` borrow; panics (via
/// `as_borrowed_ref`/`unreachable!`) when the root is owned or the parent is missing.
pub(crate) fn focus_parent_borrowed(&self) -> &'a TrieNodeODRc<V, A> {
let parent_key = self.parent_key();
if parent_key.len() == 0 {
return self.root_node.as_borrowed_ref()
}
// A non-empty parent key implies at least one ancestor entry exists.
let parent_node = match self.ancestors.last() {
Some((focus_node, _iter_tok, _prefix_offset)) => {
*focus_node
},
None => unreachable!()
};
let (key_len, node) = parent_node.node_get_child(parent_key).unwrap();
debug_assert_eq!(key_len, parent_key.len());
node
}
#[inline]
/// Appends `k` to the path and hops through as many child nodes as the combined
/// residual key resolves to. Returns `self` plus the unconsumed key suffix
/// (empty when the descent landed exactly on a node boundary). Does NOT check
/// that the final residual bytes exist — callers do that.
fn descend_to_internal(&mut self, k: &[u8]) -> (&Self, &[u8]) {
self.focus_iter_token = NODE_ITER_INVALID;
self.prefix_buf.extend(k);
let mut key_start = self.node_key_start();
let mut key = &self.prefix_buf[key_start..];
while let Some((consumed_byte_cnt, next_node)) = self.focus_node.node_get_child(key) {
let next_node = next_node.as_tagged();
key_start += consumed_byte_cnt;
self.ancestors.push((*self.focus_node.clone(), NODE_ITER_INVALID, key_start));
*self.focus_node = next_node;
if consumed_byte_cnt < key.len() {
key = &key[consumed_byte_cnt..]
} else {
return (self, &[]);
};
}
(self, key)
}
#[inline]
/// Moves the focus to the adjacent sibling byte (`next` = forward/backward),
/// keeping the path length unchanged. Returns false when no sibling exists.
fn to_sibling(&mut self, next: bool) -> bool {
self.prepare_buffers();
debug_assert!(self.is_regularized());
if self.node_key().len() != 0 {
// Focus is inside `focus_node`'s key space: siblings are found in this node.
match self.focus_node.get_sibling_of_child(self.node_key(), next) {
(Some(prefix), Some(child_node)) => {
// Sibling is a child-node boundary: swap the last byte and descend.
*self.prefix_buf.last_mut().unwrap() = prefix;
self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
*self.focus_node = child_node;
self.focus_iter_token = NODE_ITER_INVALID;
true
},
(Some(prefix), None) => {
// Sibling stays inside this node's key space.
*self.prefix_buf.last_mut().unwrap() = prefix;
true
},
(None, _) => false
}
} else {
// Focus is at a node's entry point: siblings live in the parent's key space.
let mut should_pop = false;
let result = match self.ancestors.last() {
None => { false }
Some((parent, _iter_tok, _prefix_offset)) => {
match parent.get_sibling_of_child(self.parent_key(), next) {
(Some(prefix), Some(child_node)) => {
*self.prefix_buf.last_mut().unwrap() = prefix;
*self.focus_node = child_node;
self.focus_iter_token = NODE_ITER_INVALID;
true
},
(Some(prefix), None) => {
// Sibling is not a node boundary: the parent becomes the focus node.
*self.prefix_buf.last_mut().unwrap() = prefix;
should_pop = true;
true
},
(None, _) => {
false
}
}
}
};
// Deferred pop because `ancestors.last()` held a borrow inside the match.
if should_pop {
let (focus_node, iter_tok, _prefix_offset) = self.ancestors.pop().unwrap();
*self.focus_node = focus_node;
self.focus_iter_token = iter_tok;
}
result
}
}
#[inline]
/// Shared driver for `descend_first_k_path` / `to_next_k_path`: advances the
/// zipper to the next existing path of exactly `k` bytes below `base_idx`.
/// Returns false (and truncates back) when no such path remains.
fn k_path_internal(&mut self, k: usize, base_idx: usize) -> bool {
loop {
debug_assert!(self.prefix_buf.len() <= base_idx+k);
debug_assert!(self.prefix_buf.len() >= base_idx);
if self.focus_iter_token == NODE_ITER_INVALID {
// Re-seed the token; skip the item at the current position itself when it
// matches the residual key exactly (we want the NEXT path, not this one).
self.focus_iter_token = self.focus_node.iter_token_for_path(self.node_key());
let (new_tok, key_bytes, _child_node, _value) = self.focus_node.next_items(self.focus_iter_token);
let node_key = self.node_key();
if key_bytes.len() >= node_key.len() {
if &key_bytes[..node_key.len()] == node_key {
self.focus_iter_token = new_tok;
}
}
}
if self.focus_iter_token == NODE_ITER_FINISHED {
if self.node_key_start() <= base_idx {
// Current node started at/above the base: iteration is fully exhausted.
self.focus_iter_token = NODE_ITER_FINISHED;
self.prefix_buf.truncate(base_idx);
return false
}
// Otherwise pop to the parent node and continue from its saved token.
if let Some((focus_node, iter_tok, prefix_offset)) = self.ancestors.pop() {
*self.focus_node = focus_node;
self.focus_iter_token = iter_tok;
self.prefix_buf.truncate(prefix_offset);
} else {
let new_len = self.origin_path.len();
self.focus_iter_token = NODE_ITER_INVALID;
self.prefix_buf.truncate(new_len);
return false
}
}
let (new_tok, key_bytes, child_node, _value) = self.focus_node.next_items(self.focus_iter_token);
if new_tok != NODE_ITER_FINISHED {
let key_start = self.node_key_start();
if key_start < base_idx {
// The node's key starts above the base: the item must still match the
// fixed bytes between key_start and base_idx, or we've left the subtree.
let base_key_len = base_idx - key_start; if base_key_len > key_bytes.len() || &key_bytes[..base_key_len] != &self.prefix_buf[key_start..base_idx] {
self.prefix_buf.truncate(base_idx);
return false;
}
}
self.focus_iter_token = new_tok;
let key_start = self.node_key_start();
self.prefix_buf.truncate(key_start);
self.prefix_buf.extend(key_bytes);
if self.prefix_buf.len() <= k+base_idx {
// Still at/above the target depth: descend into a child node if present.
match child_node {
None => {},
Some(rec) => {
self.ancestors.push((*self.focus_node.clone(), new_tok, self.prefix_buf.len()));
*self.focus_node = rec.as_tagged();
self.focus_iter_token = self.focus_node.new_iter_token();
},
}
} else {
// Overshot: clamp to exactly k bytes below the base.
self.prefix_buf.truncate(k+base_idx);
}
if self.prefix_buf.len() == k+base_idx {
return true;
}
} else {
self.focus_iter_token = NODE_ITER_FINISHED;
}
}
}
#[inline]
/// True when a value exists at the focus. Same lookup order as `get_val`:
/// focus node's key space, then parent node, then the zipper's root value.
fn is_val_internal(&self) -> bool {
let key = self.node_key();
if key.len() > 0 {
self.focus_node.node_contains_val(key)
} else {
if let Some((parent, _iter_tok, _prefix_offset)) = self.ancestors.last() {
parent.node_contains_val(self.parent_key())
} else {
self.root_val.is_some()
}
}
}
#[inline]
/// Descends along the first (lowest) branch below the focus, appending that
/// branch's whole key. Panics (`unreachable!`) if called where no branch exists —
/// callers guard with `child_count()`.
fn descend_first(&mut self) {
self.prepare_buffers();
match self.focus_node.first_child_from_key(self.node_key()) {
(Some(prefix), Some(child_node)) => {
self.prefix_buf.extend(prefix);
self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
*self.focus_node = child_node;
self.focus_iter_token = NODE_ITER_INVALID;
// An empty prefix means the child boundary was exactly at the focus; keep
// descending so the call always makes forward progress.
if prefix.len() == 0 {
self.descend_first()
}
},
(Some(prefix), None) => {
self.prefix_buf.extend(prefix);
},
(None, _) => unreachable!()
}
}
#[inline]
/// Byte offset within the path buffer where the focus node's key begins:
/// the deepest ancestor's recorded prefix offset, or `root_key_start` when
/// the focus is the root node.
fn node_key_start(&self) -> usize {
    match self.ancestors.last() {
        Some((_node, _iter_tok, offset)) => *offset,
        None => self.root_key_start,
    }
}
#[inline]
/// Key bytes addressing the current position within the focus node, taken
/// from whichever buffer currently backs the path: `prefix_buf` once the
/// zipper has written to it, otherwise the borrowed `origin_path`.
pub(crate) fn node_key(&self) -> &[u8] {
    let start = self.node_key_start();
    if !self.prefix_buf.is_empty() {
        return &self.prefix_buf[start..];
    }
    if self.origin_path.len() > 0 {
        // SAFETY: NOTE(review) — presumes `origin_path` holds the `Slice`
        // variant whenever `prefix_buf` is unused; mirrors the original call.
        unsafe { &self.origin_path.as_slice_unchecked()[start..] }
    } else {
        &[]
    }
}
#[inline]
/// Key bytes leading into the root node. Only valid when the root node is
/// owned and a parent key start has been recorded (checked in debug builds).
fn root_node_key(&self) -> &[u8] {
    debug_assert!(matches!(self.root_node, OwnedOrBorrowed::Owned(_)));
    debug_assert!(self.root_parent_key_start < usize::MAX);
    let use_origin = self.prefix_buf.capacity() == 0 && self.origin_path.len() > 0;
    if use_origin {
        // SAFETY: NOTE(review) — presumes `origin_path` is the `Slice`
        // variant while `prefix_buf` is unallocated; mirrors the original.
        unsafe { &self.origin_path.as_slice_unchecked()[self.root_parent_key_start..] }
    } else {
        &self.prefix_buf[self.root_parent_key_start..self.root_key_start]
    }
}
#[inline]
/// Returns the key bytes within the parent node that lead to the focus
/// node: the path span between the grandparent's prefix offset (or the
/// root key start) and the focus node's key start.
fn parent_key(&self) -> &[u8] {
if self.prefix_buf.len() > 0 {
let key_start = if self.ancestors.len() > 1 {
// SAFETY: `ancestors.len() > 1` guarantees index `len()-2` is in bounds.
unsafe{ self.ancestors.get_unchecked(self.ancestors.len()-2) }.2
} else {
self.root_key_start
};
&self.prefix_buf[key_start..self.node_key_start()]
} else {
// `prefix_buf` unused: the path still lives in the borrowed origin path.
// NOTE(review): `usize::MAX` appears to act as a "no parent key recorded"
// sentinel (cf. the debug_assert in `root_node_key`) — confirm.
if self.root_parent_key_start == usize::MAX || self.origin_path.len() == 0 {
&[]
} else {
// SAFETY: NOTE(review) — presumes `origin_path` is the `Slice` variant
// on this branch; mirrors the usage in `node_key`/`root_node_key`.
let origin_path = unsafe{ self.origin_path.as_slice_unchecked() };
&origin_path[self.root_parent_key_start..self.root_key_start]
}
}
}
#[inline]
/// Number of bytes in `prefix_buf` beyond the deepest ancestor's prefix
/// offset, or beyond the origin path when no ancestors have been pushed.
fn excess_key_len(&self) -> usize {
    let base = match self.ancestors.last() {
        Some((_node, _iter_tok, offset)) => *offset,
        None => self.origin_path.len(),
    };
    self.prefix_buf.len() - base
}
#[inline]
/// Moves the focus back to the parent node, restoring its saved iteration
/// token. The focus node's key must already be fully consumed.
fn ascend_across_nodes(&mut self) {
    debug_assert!(self.node_key().is_empty());
    match self.ancestors.pop() {
        Some((parent_node, parent_tok, _prefix_offset)) => {
            *self.focus_node = parent_node;
            self.focus_iter_token = parent_tok;
        }
        None => self.focus_iter_token = NODE_ITER_INVALID,
    }
}
#[inline]
/// Retreats the path to the prior branch point inside the focus node, but
/// never truncates above the zipper's origin path.
fn ascend_within_node(&mut self) {
    let prior_key = self.focus_node.prior_branch_key(self.node_key());
    let target = self.node_key_start() + prior_key.len();
    self.prefix_buf.truncate(target.max(self.origin_path.len()));
}
/// Makes `node` the new focus node, saving the previous focus node, its
/// iteration token, and the current path length on the ancestor stack.
pub(crate) fn push_node(&mut self, node: TaggedNodeRef<'a, V, A>) {
self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
*self.focus_node = node;
// The freshly-pushed node has not been iterated yet.
self.focus_iter_token = NODE_ITER_INVALID;
}
}
#[test]
/// `reserve_buffers` must pre-allocate `prefix_buf` so that later descent
/// never reallocates: capacity and the buffer pointer stay stable.
fn read_zipper_reserve_buffer_test() {
let map = PathMap::<()>::new();
let mut rz = map.read_zipper();
// A fresh zipper starts with no allocation at all.
assert_eq!(rz.z.prefix_buf.capacity(), 0);
rz.reserve_buffers(4096, 512);
assert_eq!(rz.z.prefix_buf.capacity(), 4096);
let old_ptr = rz.z.prefix_buf.as_ptr();
rz.descend_to(b"hello");
// Descending within reserved capacity must not move the buffer.
assert_eq!(rz.z.prefix_buf.capacity(), 4096);
assert_eq!(rz.z.prefix_buf.as_ptr(), old_ptr);
assert_eq!(&rz.z.prefix_buf, b"hello");
assert_eq!(&rz.path(), b"hello");
// Same checks for a zipper rooted at a borrowed origin path: reserving
// buffers copies the borrowed origin bytes into prefix_buf.
let mut rz = map.read_zipper_at_borrowed_path(b"hi");
assert_eq!(rz.z.prefix_buf.capacity(), 0);
assert_eq!(rz.path(), b"");
assert_eq!(rz.origin_path(), b"hi");
rz.reserve_buffers(4096, 512);
assert_eq!(rz.z.prefix_buf.capacity(), 4096);
let old_ptr = rz.z.prefix_buf.as_ptr();
assert_eq!(rz.path(), b"");
assert_eq!(rz.origin_path(), b"hi");
assert_eq!(&rz.z.prefix_buf, b"hi");
rz.descend_to(b"-howdy");
assert_eq!(rz.z.prefix_buf.capacity(), 4096);
assert_eq!(rz.z.prefix_buf.as_ptr(), old_ptr);
assert_eq!(&rz.z.prefix_buf, b"hi-howdy");
// `path()` is relative to the zipper root; `origin_path()` is absolute.
assert_eq!(&rz.path(), b"-howdy");
assert_eq!(rz.origin_path(), b"hi-howdy");
}
}
use read_zipper_core::*;
/// Walks `path` downward from `root_node`, following child links for as
/// long as they consume path bytes.
///
/// Returns the deepest node reached, the unconsumed remainder of `path`,
/// and the value at the final position (`root_val` if no step was taken).
/// With `stop_short == true`, the walk stops *before* entering a child that
/// would consume the entire remaining key, and reports no value for it.
pub(crate) fn node_along_path<'a, 'path, V: Clone + Sync + Send, A: Allocator + 'a>(root_node: &'a TrieNodeODRc<V, A>, path: &'path [u8], root_val: Option<&'a V>, stop_short: bool) -> (&'a TrieNodeODRc<V, A>, &'path [u8], Option<&'a V>) {
    let mut node = root_node;
    let mut remaining = path;
    let mut val = root_val;
    if !remaining.is_empty() {
        let mut tagged = node.as_tagged();
        while let Some((consumed_byte_cnt, next_node)) = tagged.node_get_child(remaining) {
            if consumed_byte_cnt < remaining.len() {
                // Partial match: step into the child and keep walking.
                node = next_node;
                remaining = &remaining[consumed_byte_cnt..];
                tagged = node.as_tagged();
            } else {
                // The child link consumes the whole remaining key.
                if stop_short {
                    val = None;
                } else {
                    val = tagged.node_get_val(remaining);
                    node = next_node;
                    remaining = &[];
                }
                break;
            }
        }
    }
    (node, remaining, val)
}
/// Iterator yielding `(path, value)` pairs from a read zipper.
pub struct ReadZipperIter<'a, 'path, V: Clone + Send + Sync, A: Allocator = GlobalAlloc>{
// True once the zipper's starting position has been checked for a value.
started: bool,
// Set to `None` when iteration is exhausted.
zipper: Option<ReadZipperCore<'a, 'path, V, A>>,
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> Iterator for ReadZipperIter<'a, '_, V, A> {
    type Item = (Vec<u8>, &'a V);
    /// Yields the value at the zipper's starting position first (if any),
    /// then each subsequent value until the zipper reports no more.
    fn next(&mut self) -> Option<(Vec<u8>, &'a V)> {
        if !self.started {
            self.started = true;
            if let Some(z) = self.zipper.as_mut() {
                // NOTE(review): `get_val` is unsafe; this mirrors the
                // original call pattern without restating its contract.
                if let Some(val) = unsafe { z.get_val() } {
                    return Some((z.path().to_vec(), val));
                }
            }
        }
        let z = self.zipper.as_mut()?;
        match unsafe { z.to_next_get_val() } {
            Some(val) => Some((z.path().to_vec(), val)),
            None => {
                // Exhausted: drop the zipper so later calls return None.
                self.zipper = None;
                None
            }
        }
    }
}
#[derive(Clone, Copy)]
/// Either a borrowed byte slice or just its length.
pub(crate) enum SliceOrLen<'a> {
Slice(&'a [u8]),
// Invariant: `Len` always holds a non-zero length; zero-length data is
// represented as `Slice(&[])` (see `new_owned`, `set_len`, and the
// debug_assert in `len`).
Len(usize),
}
impl<'a> From<&'a [u8]> for SliceOrLen<'a> {
fn from(slice: &'a [u8]) -> Self {
Self::Slice(slice)
}
}
impl SliceOrLen<'static> {
    /// Builds the owned-length representation. Zero is normalized to the
    /// canonical empty `Slice`, preserving the non-zero `Len` invariant.
    #[inline]
    pub fn new_owned(len: usize) -> Self {
        match len {
            0 => Self::Slice(&[]),
            n => Self::Len(n),
        }
    }
}
#[allow(unused)]
impl<'a> SliceOrLen<'a> {
    /// Length in bytes, regardless of representation.
    #[inline]
    pub fn len(&self) -> usize {
        match *self {
            Self::Slice(slice) => slice.len(),
            Self::Len(len) => {
                // Invariant: a stored bare length is never zero.
                debug_assert!(len > 0);
                len
            },
        }
    }
    /// Drops the backing bytes of a non-empty `Slice`, keeping only the
    /// length. Empty slices and existing `Len` values are left untouched.
    pub fn make_len(&mut self) {
        if self.len() == 0 {
            return;
        }
        if let Self::Slice(slice) = *self {
            *self = Self::Len(slice.len());
        }
    }
    /// True when the actual bytes are still available.
    #[inline]
    pub fn is_slice(&self) -> bool {
        matches!(self, Self::Slice(_))
    }
    /// Returns the borrowed bytes; panics on the `Len` variant.
    #[inline]
    pub fn as_slice(&self) -> &'a[u8] {
        match *self {
            Self::Slice(slice) => slice,
            Self::Len(_) => unreachable!(),
        }
    }
    /// Like `as_slice`, but returns `None` instead of panicking.
    #[inline]
    pub fn try_as_slice(&self) -> Option<&'a[u8]> {
        if let Self::Slice(slice) = *self { Some(slice) } else { None }
    }
    /// Returns the borrowed bytes without checking the variant.
    ///
    /// # Safety
    /// The caller must guarantee `self` is the `Slice` variant; calling
    /// this on `Len` is undefined behavior.
    #[inline]
    pub unsafe fn as_slice_unchecked(&self) -> &'a[u8] {
        match *self {
            Self::Slice(slice) => slice,
            // SAFETY: the caller contract guarantees this arm is unreachable.
            Self::Len(_) => unsafe { core::hint::unreachable_unchecked() },
        }
    }
    /// Replaces the contents with a borrowed slice.
    #[inline]
    pub fn set_slice(&mut self, slice: &'a[u8]) {
        *self = Self::Slice(slice);
    }
    /// Replaces the contents with a bare length, normalizing zero to the
    /// empty `Slice` to keep the non-zero `Len` invariant.
    #[inline]
    pub fn set_len(&mut self, len: usize) {
        *self = if len > 0 { Self::Len(len) } else { Self::Slice(&[]) };
    }
}
#[cfg(test)]
pub(crate) mod zipper_moving_tests {
use crate::trie_map::*;
use super::*;
// Generates the standard `ZipperMoving` conformance test-suite for a zipper
// type. `$z_name` prefixes the generated test names (via `paste!`),
// `$read_keys` builds the backing store from a key list, and `$make_z`
// constructs the zipper under test from that store and a root path.
macro_rules! zipper_moving_tests {
($z_name:ident, $read_keys:expr, $make_z:expr)=>{
paste::paste! {
#[test]
fn [<$z_name _zipper_moving_basic_test>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_MOVING_BASIC_TEST_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_moving_basic_test)
}
#[test]
fn [<$z_name _zipper_with_root_path>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_WITH_ROOT_PATH_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, crate::zipper::zipper_moving_tests::ZIPPER_WITH_ROOT_PATH_PATH, crate::zipper::zipper_moving_tests::zipper_with_root_path)
}
#[test]
fn [<$z_name _zipper_indexed_bytes_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_INDEXED_BYTE_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_indexed_bytes_test1)
}
#[test]
fn [<$z_name _zipper_indexed_bytes_test2>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_INDEXED_BYTE_TEST2_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_indexed_bytes_test2)
}
#[test]
fn [<$z_name _zipper_descend_until_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_DESCEND_UNTIL_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_descend_until_test1)
}
#[test]
fn [<$z_name _zipper_ascend_until_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_ASCEND_UNTIL_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_ascend_until_test1)
}
#[test]
fn [<$z_name _zipper_ascend_until_test2>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_ASCEND_UNTIL_TEST2_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_ascend_until_test2)
}
#[test]
fn [<$z_name _zipper_ascend_until_test3>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_ASCEND_UNTIL_TEST3_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_ascend_until_test3)
}
#[test]
fn [<$z_name _zipper_ascend_until_test4>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_ASCEND_UNTIL_TEST4_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_ascend_until_test4)
}
#[test]
fn [<$z_name _zipper_ascend_until_test5>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_ASCEND_UNTIL_TEST5_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_ascend_until_test5)
}
#[test]
fn [<$z_name _indexed_zipper_movement1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_INDEXED_MOVEMENT_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::indexed_zipper_movement1)
}
#[test]
fn [<$z_name _zipper_value_locations>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_VALUE_LOCATIONS_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_value_locations)
}
#[test]
fn [<$z_name _zipper_child_mask_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_CHILD_MASK_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_child_mask_test1)
}
#[test]
fn [<$z_name _zipper_child_mask_test2>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_CHILD_MASK_TEST2_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_child_mask_test2)
}
#[test]
fn [<$z_name _descend_to_existing_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_DESCEND_TO_EXISTING_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::descend_to_existing_test1)
}
#[test]
fn [<$z_name _descend_to_existing_test2>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_DESCEND_TO_EXISTING_TEST2_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::descend_to_existing_test2)
}
#[test]
fn [<$z_name _descend_to_existing_test3>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_DESCEND_TO_EXISTING_TEST3_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::descend_to_existing_test3)
}
#[test]
fn [<$z_name _to_next_step_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_TO_NEXT_STEP_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::to_next_step_test1)
}
#[test]
fn [<$z_name _zipper_byte_iter_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_BYTES_ITER_TEST1_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_byte_iter_test1)
}
#[test]
fn [<$z_name _zipper_byte_iter_test2>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_BYTES_ITER_TEST2_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, crate::zipper::zipper_moving_tests::ZIPPER_BYTES_ITER_TEST2_PATH, crate::zipper::zipper_moving_tests::zipper_byte_iter_test2)
}
#[test]
fn [<$z_name _zipper_byte_iter_test3>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_BYTES_ITER_TEST3_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, crate::zipper::zipper_moving_tests::ZIPPER_BYTES_ITER_TEST3_PATH, crate::zipper::zipper_moving_tests::zipper_byte_iter_test3)
}
#[test]
fn [<$z_name _zipper_byte_iter_test4>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_BYTES_ITER_TEST4_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_byte_iter_test4)
}
#[test]
fn [<$z_name _zipper_byte_iter_test5>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_moving_tests::ZIPPER_BYTES_ITER_TEST5_KEYS);
crate::zipper::zipper_moving_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_moving_tests::zipper_byte_iter_test5)
}
}
}
}
pub(crate) use zipper_moving_tests;
/// Shared driver for the generated test-suite: builds a zipper over `store`
/// rooted at `z_path` via `make_t`, then hands it to `test_f`.
pub fn run_test<'a, T: 'a + ZipperMoving, Store>(
    store: &'a mut Store,
    make_t: impl Fn(&'a mut Store, &'a[u8]) -> T,
    z_path: &'a[u8],
    test_f: impl Fn(T)
) {
    test_f(make_t(store, z_path));
}
pub const ZIPPER_MOVING_BASIC_TEST_KEYS: &[&[u8]] = &[b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"];
/// Exercises sibling stepping (`to_next_sibling_byte` / `to_prev_sibling_byte`)
/// and `descend_indexed_byte` around the three-way branch under "rom".
pub fn zipper_moving_basic_test<Z: ZipperMoving>(mut zipper: Z) {
// Panics unless `val` equals one of the entries in `list`.
fn assert_in_list(val: &[u8], list: &[&[u8]]) {
for test_val in list {
if *test_val == val {
return;
}
}
panic!("val not found in list: {}", std::str::from_utf8(val).unwrap_or(""))
}
// Descend one byte at a time to "rom'".
zipper.descend_to(&[b'r']); zipper.descend_to(&[b'o']); zipper.descend_to(&[b'm']); zipper.descend_to(&[b'\'']);
assert!(zipper.path_exists()); assert!(zipper.to_next_sibling_byte()); assert_in_list(zipper.path(), &[b"roma", b"romu"]);
assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'n']); assert!(zipper.to_next_sibling_byte()); assert_in_list(zipper.path(), &[b"roma", b"romu"]);
assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'l']); assert!(!zipper.to_next_sibling_byte()); assert!(zipper.to_prev_sibling_byte()); assert_in_list(zipper.path(), &[b"roma", b"romu"]);
assert!(zipper.to_prev_sibling_byte()); assert_eq!(zipper.path(), b"rom'");
assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'i']);
// Indexed descent visits siblings in byte order: '\'', 'a', 'u'.
assert!(zipper.ascend(1)); assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'\'', b'a', b'u']); assert!(zipper.descend_indexed_byte(0)); assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'i']);
assert!(zipper.ascend(1)); assert!(zipper.descend_indexed_byte(1)); assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'n']);
assert!(zipper.ascend(1));
assert!(zipper.descend_indexed_byte(2)); assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'l']);
assert!(zipper.ascend(1));
assert!(zipper.descend_indexed_byte(1)); assert_eq!(zipper.child_mask().iter().collect::<Vec<_>>(), vec![b'n']);
assert!(zipper.ascend(1));
}
pub const ZIPPER_WITH_ROOT_PATH_KEYS: &[&[u8]] = &[b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"];
pub const ZIPPER_WITH_ROOT_PATH_PATH: &[u8] = b"ro";
/// With the zipper rooted at "ro", `path()` is relative to that root and
/// neither `ascend` nor `ascend_until` can climb above it.
pub fn zipper_with_root_path<Z: ZipperMoving>(mut zipper: Z) {
assert_eq!(zipper.path(), b"");
assert_eq!(zipper.child_count(), 1);
zipper.descend_to(b"m");
assert_eq!(zipper.path(), b"m");
assert_eq!(zipper.child_count(), 3);
zipper.descend_to(b"an");
assert_eq!(zipper.path(), b"man");
assert_eq!(zipper.child_count(), 2);
zipper.descend_to(b"e");
assert_eq!(zipper.path(), b"mane");
assert_eq!(zipper.child_count(), 0);
assert_eq!(zipper.ascend_until(), true);
zipper.descend_to(b"us");
assert_eq!(zipper.path(), b"manus");
assert_eq!(zipper.child_count(), 0);
// Ascend back up through each branch point to the zipper root.
assert_eq!(zipper.ascend_until(), true);
assert_eq!(zipper.path(), b"man");
assert_eq!(zipper.child_count(), 2);
assert_eq!(zipper.ascend_until(), true);
assert_eq!(zipper.path(), b"m");
assert_eq!(zipper.child_count(), 3);
assert_eq!(zipper.ascend_until(), true);
assert_eq!(zipper.path(), b"");
assert_eq!(zipper.child_count(), 1);
assert_eq!(zipper.at_root(), true);
assert_eq!(zipper.ascend_until(), false);
zipper.descend_to(b"manus");
assert_eq!(zipper.path(), b"manus");
assert_eq!(zipper.ascend(1), true);
assert_eq!(zipper.path(), b"manu");
// Over-ascending stops (and reports false) at the zipper root.
assert_eq!(zipper.ascend(5), false);
assert_eq!(zipper.path(), b"");
assert_eq!(zipper.at_root(), true);
zipper.descend_to(b"mane");
assert_eq!(zipper.path(), b"mane");
assert_eq!(zipper.ascend(3), true);
assert_eq!(zipper.path(), b"m");
assert_eq!(zipper.child_count(), 3);
}
pub const ZIPPER_INDEXED_BYTE_TEST1_KEYS: &[&[u8]] = &[b"0", b"1", b"2", b"3", b"4", b"5", b"6"];
/// `descend_indexed_byte` with an out-of-range index must fail and leave the
/// zipper position unchanged; in-range indices land on the expected byte.
pub fn zipper_indexed_bytes_test1<Z: ZipperMoving>(mut zip: Z) {
zip.descend_to("2");
assert_eq!(zip.is_val(), true);
assert_eq!(zip.child_count(), 0);
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.path(), b"2");
zip.reset();
assert!(zip.descend_indexed_byte(2));
assert_eq!(zip.is_val(), true);
assert_eq!(zip.child_count(), 0);
assert_eq!(zip.path(), b"2");
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.path(), b"2");
zip.reset();
// Index 7 is out of range for 7 children (0..=6).
assert!(!zip.descend_indexed_byte(7));
assert_eq!(zip.is_val(), false);
assert_eq!(zip.child_count(), 7);
assert_eq!(zip.path(), b"");
// Repeat the checks against a locally-built map with shared prefixes.
let keys = ["000", "1Z", "00AAA", "00AA000", "00AA00AAA"];
let map: PathMap<()> = keys.into_iter().map(|v| (v, ())).collect();
let mut zip = map.read_zipper();
zip.descend_to("000");
assert_eq!(zip.val(), Some(&()));
assert_eq!(zip.path(), b"000");
assert_eq!(zip.child_count(), 0);
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.path(), b"000");
zip.reset();
assert!(!zip.descend_indexed_byte(2));
assert_eq!(zip.child_count(), 2);
assert!(zip.descend_indexed_byte(1));
assert_eq!(zip.path(), b"1");
assert_eq!(zip.val(), None);
assert_eq!(zip.child_count(), 1);
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.val(), None);
assert_eq!(zip.path(), b"1");
zip.reset();
assert!(zip.descend_indexed_byte(0));
assert_eq!(zip.path(), b"0");
assert_eq!(zip.val(), None);
assert_eq!(zip.child_count(), 1);
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.val(), None);
assert_eq!(zip.path(), b"0");
}
pub const ZIPPER_INDEXED_BYTE_TEST2_KEYS: &[&[u8]] = &[b"000", b"1Z", b"00AAA", b"00AA000", b"00AA00AAA"];
/// Same scenario as the second half of test1, but driven entirely through
/// the zipper under test (using `is_val` rather than `val`).
pub fn zipper_indexed_bytes_test2<Z: ZipperMoving>(mut zip: Z) {
zip.descend_to("000");
assert_eq!(zip.is_val(), true);
assert_eq!(zip.path(), b"000");
assert_eq!(zip.child_count(), 0);
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.path(), b"000");
zip.reset();
assert!(!zip.descend_indexed_byte(2));
assert_eq!(zip.child_count(), 2);
assert!(zip.descend_indexed_byte(1));
assert_eq!(zip.path(), b"1");
assert_eq!(zip.is_val(), false);
assert_eq!(zip.child_count(), 1);
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.is_val(), false);
assert_eq!(zip.path(), b"1");
zip.reset();
assert!(zip.descend_indexed_byte(0));
assert_eq!(zip.path(), b"0");
assert_eq!(zip.is_val(), false);
assert_eq!(zip.child_count(), 1);
assert!(!zip.descend_indexed_byte(1));
assert_eq!(zip.is_val(), false);
assert_eq!(zip.path(), b"0");
}
pub const ZIPPER_DESCEND_UNTIL_TEST1_KEYS: &[&[u8]] = &[b"a", b"ab", b"abCDEf", b"abCDEfGHi"];
/// Each `descend_until` call must stop exactly at the next value position.
pub fn zipper_descend_until_test1<Z: ZipperMoving>(mut zip: Z) {
for key in ZIPPER_DESCEND_UNTIL_TEST1_KEYS {
assert!(zip.descend_until());
assert_eq!(zip.path(), *key);
}
}
pub const ZIPPER_ASCEND_UNTIL_TEST1_KEYS: &[&[u8]] = &[b"AAa", b"AAb", b"AAc"];
/// Ascending from a non-existent path first stops at the deepest existing
/// position ("AAa"), then at each branch/value point above it.
pub fn zipper_ascend_until_test1<Z: ZipperMoving>(mut zip: Z) {
zip.descend_to(b"AAaDDd");
assert!(!zip.path_exists());
assert_eq!(zip.path(), b"AAaDDd");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"AAa");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"AA");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"");
assert!(!zip.ascend_until());
}
pub const ZIPPER_ASCEND_UNTIL_TEST2_KEYS: &[&[u8]] = &[b"AAa", b"AAb"];
/// Same walk as test1 over a two-way (rather than three-way) branch.
pub fn zipper_ascend_until_test2<Z: ZipperMoving>(mut zip: Z) {
zip.descend_to(b"AAaDDd");
assert!(!zip.path_exists());
assert_eq!(zip.path(), b"AAaDDd");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"AAa");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"AA");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"");
assert!(!zip.ascend_until());
}
pub const ZIPPER_ASCEND_UNTIL_TEST3_KEYS: &[&[u8]] = &[b"1", b"12", b"123", b"1234", b"12345"];
/// With a value at every prefix, `ascend_until` steps one value at a time,
/// while `ascend_until_branch` skips values and stops only at branch points.
pub fn zipper_ascend_until_test3<Z: ZipperMoving>(mut zip: Z) {
zip.descend_to(b"123456");
assert_eq!(zip.path_exists(), false);
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"12345");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"1234");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"123");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"12");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"1");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"");
assert!(!zip.ascend_until());
assert!(zip.at_root());
zip.descend_to(b"12345");
assert!(zip.path_exists());
assert_eq!(zip.path(), b"12345");
// This key set has no branches, so ascend_until_branch jumps to the root.
assert!(zip.ascend_until_branch());
assert_eq!(zip.path(), b"");
assert!(zip.at_root());
// Repeat with a locally-built map that does contain branch points.
let keys = ["1", "123", "12345", "1abc", "1234abc"];
let map: PathMap<()> = keys.into_iter().map(|v| (v, ())).collect();
let mut zip = map.read_zipper();
zip.descend_to(b"12345");
assert!(zip.path_exists());
assert_eq!(zip.path(), b"12345");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"1234"); assert_eq!(zip.is_val(), false);
assert_eq!(zip.child_count(), 2);
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"123"); assert_eq!(zip.child_count(), 1);
assert_eq!(zip.is_val(), true);
assert!(zip.ascend_until()); assert_eq!(zip.path(), b"1"); assert_eq!(zip.is_val(), true);
assert_eq!(zip.child_count(), 2);
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"");
assert_eq!(zip.child_count(), 1);
assert!(!zip.ascend_until());
assert!(zip.at_root());
zip.descend_to(b"12345");
assert!(zip.path_exists());
assert!(zip.ascend_until_branch());
assert_eq!(zip.path(), b"1234");
assert!(zip.ascend_until_branch());
assert_eq!(zip.path(), b"1");
assert!(zip.ascend_until_branch());
assert_eq!(zip.path(), b"");
assert!(!zip.ascend_until_branch());
assert!(zip.at_root());
}
pub const ZIPPER_ASCEND_UNTIL_TEST4_KEYS: &[&[u8]] = &[b"1", b"123", b"12345", b"1abc", b"1234abc"];
/// Same scenario as the second half of test3, driven through the zipper
/// under test instead of a locally-built map.
pub fn zipper_ascend_until_test4<Z: ZipperMoving>(mut zip: Z) {
zip.descend_to(b"12345");
assert!(zip.path_exists());
assert_eq!(zip.path(), b"12345");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"1234"); assert_eq!(zip.is_val(), false);
assert_eq!(zip.child_count(), 2);
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"123"); assert_eq!(zip.child_count(), 1);
assert_eq!(zip.is_val(), true);
assert!(zip.ascend_until()); assert_eq!(zip.path(), b"1"); assert_eq!(zip.is_val(), true);
assert_eq!(zip.child_count(), 2);
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"");
assert_eq!(zip.child_count(), 1);
assert!(!zip.ascend_until());
assert!(zip.at_root());
zip.descend_to(b"12345");
assert!(zip.path_exists());
assert!(zip.ascend_until_branch());
assert_eq!(zip.path(), b"1234");
assert!(zip.ascend_until_branch());
assert_eq!(zip.path(), b"1");
assert!(zip.ascend_until_branch());
assert_eq!(zip.path(), b"");
assert!(!zip.ascend_until_branch());
assert!(zip.at_root());
}
pub const ZIPPER_ASCEND_UNTIL_TEST5_KEYS: &[&[u8]] = &[b"A", b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"];
/// Very long single-run key: ascend_until must cross multiple internal
/// nodes in one step to reach the previous value position.
pub fn zipper_ascend_until_test5<Z: ZipperMoving>(mut zip: Z) {
zip.descend_to(b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB");
assert_eq!(zip.path_exists(), false);
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"A");
assert!(zip.ascend_until());
assert_eq!(zip.path(), b"");
assert_eq!(zip.ascend_until(), false);
}
pub const ZIPPER_INDEXED_MOVEMENT_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"];
/// Mixes `descend_indexed_byte`, `descend_until`, and `ascend` over a trie
/// with both single-child runs and multi-way branches.
pub fn indexed_zipper_movement1<Z: ZipperMoving>(mut zipper: Z) {
// Finds and descends to the child whose last path byte equals `byte`.
fn descend_byte<Z: Zipper + ZipperMoving>(zipper: &mut Z, byte: u8) {
for i in 0..zipper.child_count() {
assert_eq!(zipper.descend_indexed_byte(i), true);
if *zipper.path().last().unwrap() == byte {
break
} else {
assert_eq!(zipper.ascend(1), true);
}
}
}
assert_eq!(zipper.path(), b"");
assert_eq!(zipper.child_count(), 4);
descend_byte(&mut zipper, b'r');
assert_eq!(zipper.path(), b"r");
assert_eq!(zipper.child_count(), 2);
// descend_until reports false at a multi-way branch (no unique next step).
assert_eq!(zipper.descend_until(), false);
descend_byte(&mut zipper, b'o');
assert_eq!(zipper.path(), b"ro");
assert_eq!(zipper.child_count(), 1);
assert_eq!(zipper.descend_until(), true);
assert_eq!(zipper.path(), b"rom");
assert_eq!(zipper.child_count(), 3);
zipper.reset();
assert_eq!(zipper.descend_until(), false);
descend_byte(&mut zipper, b'a');
assert_eq!(zipper.path(), b"a");
assert_eq!(zipper.child_count(), 1);
assert_eq!(zipper.descend_until(), true);
assert_eq!(zipper.path(), b"arrow");
assert_eq!(zipper.child_count(), 0);
assert_eq!(zipper.ascend(3), true);
assert_eq!(zipper.path(), b"ar");
assert_eq!(zipper.child_count(), 1);
}
pub const ZIPPER_VALUE_LOCATIONS_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"roman", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"];
/// `is_val` must be true exactly at key positions, including a key ("roman")
/// that is a strict prefix of other keys.
pub fn zipper_value_locations<Z: ZipperMoving>(mut zipper: Z) {
zipper.descend_to(b"ro");
assert!(zipper.path_exists());
assert_eq!(zipper.is_val(), false);
zipper.descend_to(b"mulus");
assert_eq!(zipper.is_val(), true);
zipper.reset();
zipper.descend_to(b"roman");
assert!(zipper.path_exists());
assert_eq!(zipper.is_val(), true);
zipper.descend_to(b"e");
assert_eq!(zipper.is_val(), true);
assert_eq!(zipper.ascend(1), true);
zipper.descend_to(b"u");
assert_eq!(zipper.is_val(), false);
zipper.descend_until();
assert_eq!(zipper.is_val(), true);
}
pub const ZIPPER_CHILD_MASK_TEST1_KEYS: &[&[u8]] = &[&[8, 194, 1, 45, 194, 1], &[34, 193]];
/// Spot-checks the 4x64-bit child masks: byte 45 sets bit 45 of word 0
/// (0x200000000000); byte 194 sets bit 2 of word 3 (0x4).
pub fn zipper_child_mask_test1<Z: ZipperMoving>(mut zipper: Z) {
zipper.descend_to(&[8, 194, 1]);
assert_eq!(zipper.path_exists(), true);
assert_eq!(zipper.child_count(), 1);
assert_eq!(zipper.child_mask(), [0x200000000000, 0, 0, 0]);
zipper.reset();
zipper.descend_to(&[8, 194, 1, 45]);
assert_eq!(zipper.path_exists(), true);
assert_eq!(zipper.child_count(), 1);
assert_eq!(zipper.child_mask(), [0, 0, 0, 0x4]);
}
pub const ZIPPER_CHILD_MASK_TEST2_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"roman", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"];
/// Checks the child mask at selected stops while stepping through the trie
/// with `to_next_step` (ASCII letters land in mask word 1, hence -64).
pub fn zipper_child_mask_test2<Z: ZipperMoving>(mut zipper: Z) {
assert_eq!(zipper.child_mask(), [0, 1<<(b'a'-64) | 1<<(b'b'-64) | 1<<(b'c'-64) | 1<<(b'r'-64), 0, 0]);
let mut i = 0;
while zipper.to_next_step() {
match i {
0 => assert_eq!(zipper.child_mask(), [0, 1<<(b'r'-64), 0, 0]),
1 => assert_eq!(zipper.child_mask(), [0, 1<<(b'r'-64), 0, 0]),
2 => assert_eq!(zipper.child_mask(), [0, 1<<(b'o'-64), 0, 0]),
3 => assert_eq!(zipper.child_mask(), [0, 1<<(b'w'-64), 0, 0]),
4 => assert_eq!(zipper.child_mask(), [0, 0, 0, 0]),
14 => assert_eq!(zipper.child_mask(), [0, 1<<(b'o'-64) | 1<<(b'u'-64), 0, 0]),
_ => {}
}
i += 1;
}
}
pub const ZIPPER_DESCEND_TO_EXISTING_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"roman", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"];
/// `descend_to_existing` returns the number of bytes that actually exist
/// and stops the zipper at that point.
pub fn descend_to_existing_test1<Z: ZipperMoving>(mut zipper: Z) {
assert_eq!(3, zipper.descend_to_existing("bowling"));
assert_eq!("bow".as_bytes(), zipper.path());
zipper.reset();
assert_eq!(3, zipper.descend_to_existing("can"));
assert_eq!("can".as_bytes(), zipper.path());
zipper.reset();
assert_eq!(0, zipper.descend_to_existing(""));
assert_eq!("".as_bytes(), zipper.path());
zipper.reset();
}
pub const ZIPPER_DESCEND_TO_EXISTING_TEST2_KEYS: &[&[u8]] = &[b"arrow"];
/// Very long requested paths must still stop at the deepest existing byte.
pub fn descend_to_existing_test2<Z: ZipperMoving>(mut zipper: Z) {
assert_eq!(5, zipper.descend_to_existing("arrow0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"));
assert_eq!(zipper.path(), &b"arrow"[..]);
zipper.reset();
assert_eq!(3, zipper.descend_to_existing("arr0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"));
assert_eq!(zipper.path(), &b"arr"[..]);
}
pub const ZIPPER_DESCEND_TO_EXISTING_TEST3_KEYS: &[&[u8]] = &[b"arrow"];
/// Starting from a non-existent position, `descend_to_existing` descends
/// zero bytes and leaves the path untouched.
pub fn descend_to_existing_test3<Z: ZipperMoving>(mut zipper: Z) {
zipper.descend_to("arrow00000");
assert_eq!(false, zipper.path_exists());
assert_eq!(zipper.path(), &b"arrow00000"[..]);
assert_eq!(0, zipper.descend_to_existing("0000"));
assert_eq!(zipper.path(), &b"arrow00000"[..]);
}
pub const ZIPPER_TO_NEXT_STEP_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"roman", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"];
/// `to_next_step` must visit every position of the trie in depth-first
/// order; spot-checks the path at selected step indices.
pub fn to_next_step_test1<Z: ZipperMoving>(mut zipper: Z) {
let mut i = 0;
while zipper.to_next_step() {
match i {
0 => assert_eq!(zipper.path(), b"a"),
4 => assert_eq!(zipper.path(), b"arrow"),
5 => assert_eq!(zipper.path(), b"b"),
7 => assert_eq!(zipper.path(), b"bow"),
8 => assert_eq!(zipper.path(), b"c"),
13 => assert_eq!(zipper.path(), b"cannon"),
14 => assert_eq!(zipper.path(), b"r"),
18 => assert_eq!(zipper.path(), b"rom'i"),
20 => assert_eq!(zipper.path(), b"roman"),
21 => assert_eq!(zipper.path(), b"romane"),
23 => assert_eq!(zipper.path(), b"romanus"),
24 => assert_eq!(zipper.path(), b"romu"),
25 => assert_eq!(zipper.path(), b"romul"),
26 => assert_eq!(zipper.path(), b"romulu"),
27 => assert_eq!(zipper.path(), b"romulus"),
28 => assert_eq!(zipper.path(), b"ru"),
32 => assert_eq!(zipper.path(), b"rubens"),
33 => assert_eq!(zipper.path(), b"ruber"),
37 => assert_eq!(zipper.path(), b"rubicon"),
42 => assert_eq!(zipper.path(), b"rubicundus"),
_ => {}
}
i += 1;
}
}
pub const ZIPPER_BYTES_ITER_TEST1_KEYS: &[&[u8]] = &[b"ABCDEFGHIJKLMNOPQRSTUVWXYZ", b"ab",];
/// Single-child positions: `descend_first_byte` succeeds and
/// `to_next_sibling_byte` fails without moving the zipper.
pub fn zipper_byte_iter_test1<Z: ZipperMoving>(mut zipper: Z) {
zipper.descend_to_byte(b'A');
assert_eq!(zipper.path_exists(), true);
assert_eq!(zipper.descend_first_byte(), true);
assert_eq!(zipper.path(), b"AB");
assert_eq!(zipper.to_next_sibling_byte(), false);
assert_eq!(zipper.path(), b"AB");
}
pub const ZIPPER_BYTES_ITER_TEST2_KEYS: &[&[u8]] = &[&[2, 194, 1, 1, 193, 5], &[3, 194, 1, 0, 193, 6, 193, 5], &[3, 193, 4, 193]];
pub const ZIPPER_BYTES_ITER_TEST2_PATH: &[u8] = &[2, 194];
/// Byte iteration relative to a zipper rooted mid-key.
pub fn zipper_byte_iter_test2<Z: ZipperMoving>(mut zipper: Z) {
assert_eq!(zipper.descend_first_byte(), true);
assert_eq!(zipper.path(), &[1]);
assert_eq!(zipper.to_next_sibling_byte(), false);
assert_eq!(zipper.path(), &[1]);
}
pub const ZIPPER_BYTES_ITER_TEST3_KEYS: &[&[u8]] = &[&[3, 193, 4, 193, 5, 2, 193, 6, 193, 7], &[3, 193, 4, 193, 5, 2, 193, 6, 255]];
pub const ZIPPER_BYTES_ITER_TEST3_PATH: &[u8] = &[3, 193, 4, 193, 5, 2, 193];
/// Repeated `descend_first_byte` follows the lowest-byte branch at each step.
pub fn zipper_byte_iter_test3<Z: ZipperMoving>(mut zipper: Z) {
assert_eq!(zipper.path(), &[]);
assert_eq!(zipper.descend_first_byte(), true);
assert_eq!(zipper.path(), &[6]);
assert_eq!(zipper.descend_first_byte(), true);
assert_eq!(zipper.path(), &[6, 193]);
assert_eq!(zipper.descend_first_byte(), true);
assert_eq!(zipper.path(), &[6, 193, 7]);
}
pub const ZIPPER_BYTES_ITER_TEST4_KEYS: &[&[u8]] = &[b"ABC", b"ABCDEF", b"ABCdef"];
/// descend_first_byte prefers the lower branch ("ABCDEF" over "ABCdef"),
/// while descend_indexed_byte(1) selects the upper one.
pub fn zipper_byte_iter_test4<Z: ZipperMoving>(mut zipper: Z) {
while zipper.descend_first_byte() {}
assert_eq!(zipper.path(), b"ABCDEF");
zipper.reset();
zipper.descend_to(b"ABC");
assert!(zipper.path_exists());
assert_eq!(zipper.path(), b"ABC");
assert!(zipper.descend_indexed_byte(1));
assert_eq!(zipper.path(), b"ABCd");
assert!(zipper.descend_first_byte());
assert_eq!(zipper.path(), b"ABCde");
assert!(zipper.descend_first_byte());
assert_eq!(zipper.path(), b"ABCdef");
assert!(!zipper.descend_first_byte());
}
pub const ZIPPER_BYTES_ITER_TEST5_KEYS: &[&[u8]] = &[
&[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75, 192, 3, 193, 84, 192, 3, 193, 75, 128, 131, 193, 49],
&[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 84, 3, 193, 75, 192, 192, 3, 193, 75, 128, 131, 193, 49],
&[2, 197, 97, 120, 255, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 84, 3, 193, 75, 192, 192, 3, 193, 75, 128, 131, 193, 49],
];
/// `to_next_sibling_byte` must succeed only at the two positions where the
/// three keys diverge (depths 5 and 18) and fail everywhere else.
pub fn zipper_byte_iter_test5<Z: ZipperMoving>(mut zipper: Z) {
let keys = ZIPPER_BYTES_ITER_TEST5_KEYS;
for i in 0..keys[0].len() {
zipper.reset();
zipper.descend_to(&keys[0][..i]);
if i != 18 && i != 5 {
assert_eq!(zipper.to_next_sibling_byte(), false);
}
}
zipper.reset();
zipper.descend_to([2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75]);
assert_eq!(zipper.to_next_sibling_byte(), true);
zipper.reset();
zipper.descend_to([2, 197, 97, 120, 105]);
assert_eq!(zipper.to_next_sibling_byte(), true);
}
}
#[cfg(test)]
pub(crate) mod zipper_iteration_tests {
use super::*;
// Generates a conformance suite of `ZipperIteration` tests for a zipper type.
// `$z_name`: snake_case prefix for the generated test names.
// `$read_keys`: closure building a populated store from a slice of byte keys.
// `$make_z`: closure creating the zipper under test from that store, rooted
// at the given path. Bodies delegate to the shared test fns in this module.
macro_rules! zipper_iteration_tests {
($z_name:ident, $read_keys:expr, $make_z:expr)=>{
paste::paste! {
#[test]
fn [<$z_name _zipper_iter_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::ZIPPER_ITER_TEST1_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_iteration_tests::zipper_iter_test1)
}
#[test]
fn [<$z_name _zipper_iter_test2>]() {
// Keys for this test are generated rather than taken from a static list.
let paths = crate::zipper::zipper_iteration_tests::zipper_iter_test2_paths();
let path_refs: Vec<&[u8]> = paths.iter().map(|path| &path[..]).collect();
let mut temp_store = $read_keys(&path_refs[..]);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b"in", crate::zipper::zipper_iteration_tests::zipper_iter_test2)
}
#[test]
fn [<$z_name _k_path_test1>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST1_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b":", crate::zipper::zipper_iteration_tests::k_path_test1)
}
#[test]
fn [<$z_name _k_path_test2>]() {
// Generated pseudo-random keys, like zipper_iter_test2 above.
let paths = crate::zipper::zipper_iteration_tests::k_path_test2_paths();
let path_refs: Vec<&[u8]> = paths.iter().map(|path| &path[..]).collect();
let mut temp_store = $read_keys(&path_refs[..]);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, &[], crate::zipper::zipper_iteration_tests::k_path_test2)
}
#[test]
fn [<$z_name _k_path_test3>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST3_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b":", crate::zipper::zipper_iteration_tests::k_path_test3)
}
#[test]
fn [<$z_name _k_path_test4>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST4_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b"", crate::zipper::zipper_iteration_tests::k_path_test4)
}
#[test]
fn [<$z_name _k_path_test5>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST5_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b"", crate::zipper::zipper_iteration_tests::k_path_test5)
}
#[test]
fn [<$z_name _k_path_test6>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST6_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b"", crate::zipper::zipper_iteration_tests::k_path_test6)
}
#[test]
fn [<$z_name _k_path_test7>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST7_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b"", crate::zipper::zipper_iteration_tests::k_path_test7)
}
#[test]
fn [<$z_name _k_path_test8>]() {
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST8_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, b"", crate::zipper::zipper_iteration_tests::k_path_test8)
}
#[test]
fn [<$z_name _k_path_test9>]() {
// This test roots the zipper below the shared [2, 194] prefix.
let mut temp_store = $read_keys(crate::zipper::zipper_iteration_tests::K_PATH_TEST9_KEYS);
crate::zipper::zipper_iteration_tests::run_test(&mut temp_store, $make_z, &[2, 194], crate::zipper::zipper_iteration_tests::k_path_test9)
}
}
}
}
// Re-export so sibling test modules can invoke the macro by name.
pub(crate) use zipper_iteration_tests;
/// Builds a zipper over `store` with `make_t`, rooted at `z_path`, and hands
/// it straight to `test_f`.
pub fn run_test<'a, T: 'a + ZipperIteration, Store>(
    store: &'a mut Store,
    make_t: impl Fn(&'a mut Store, &[u8]) -> T,
    z_path: &[u8],
    test_f: impl Fn(T)
) {
    test_f(make_t(store, z_path));
}
pub const ZIPPER_ITER_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"rom'i", b"roman", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus"];
/// `to_next_val` must visit every value in lexicographic key order; the key
/// list above is already sorted (`'` < `a`), so the visit order matches it.
pub fn zipper_iter_test1<'a, Z: ZipperIteration>(mut zipper: Z) {
    let keys = ZIPPER_ITER_TEST1_KEYS;
    // The root of the zipper carries no value.
    assert!(!zipper.is_val());
    let mut idx = 0;
    while zipper.to_next_val() {
        println!("{idx} {}", std::str::from_utf8(zipper.path()).unwrap());
        assert_eq!(keys[idx], zipper.path());
        idx += 1;
    }
    assert_eq!(idx, keys.len());
}
/// Number of keys generated for `zipper_iter_test2`.
const ZIPPER_ITER_TEST2_COUNT: usize = 32;
/// Generates the keys for `zipper_iter_test2`: the prefix `b"in"` followed by
/// the big-endian bytes of each index in `0..ZIPPER_ITER_TEST2_COUNT`.
pub fn zipper_iter_test2_paths() -> Vec<Vec<u8>> {
    // A Range is already an Iterator; the old `.into_iter()` was redundant.
    (0usize..ZIPPER_ITER_TEST2_COUNT).map(|i| {
        [b"in", &i.to_be_bytes()[..]].concat()
    }).collect()
}
/// With the zipper rooted at `b"in"`, `to_next_val` must yield the values in
/// index order (paths are the big-endian index bytes) and always land on a
/// value-bearing position.
pub fn zipper_iter_test2<'a, Z: ZipperIteration>(mut zipper: Z) {
    let mut seen: usize = 0;
    while zipper.to_next_val() {
        assert!(zipper.is_val());
        assert_eq!(zipper.path(), seen.to_be_bytes());
        seen += 1;
    }
    assert_eq!(seen, ZIPPER_ITER_TEST2_COUNT);
}
pub const K_PATH_TEST1_KEYS: &[&[u8]] = &[
    b":5:above:3:the:4:fray:",
    b":5:err:",
    b":5:erronious:6:potato:",
    b":5:error:2:is:2:my:4:name:",
    b":5:hello:5:world:",
    b":5:mucky:4:muck:",
    b":5:roger:6:rabbit:",
    b":5:zebra:",
    b":9:muckymuck:5:raker:",
];
/// Iterates `sym_len + 1`-byte stretches below the "5:" prefix (zipper rooted
/// at b":"). Keys shorter than k (like "err:") contribute no k-path position;
/// "erronious" is longer than one symbol, so its stretch stops mid-word.
pub fn k_path_test1<'a, Z: ZipperIteration>(mut zipper: Z) {
    assert!(zipper.descend_indexed_byte(0));
    // The first path byte is an ASCII digit encoding the symbol length.
    let sym_len = usize::from_str_radix(std::str::from_utf8(&[zipper.path()[0]]).unwrap(), 10).unwrap();
    assert_eq!(sym_len, 5);
    assert!(zipper.descend_indexed_byte(0));
    assert_eq!(zipper.child_count(), 6);
    let k = sym_len + 1;
    assert_eq!(zipper.descend_first_k_path(k), true);
    assert_eq!(zipper.path(), b"5:above:");
    assert_eq!(zipper.to_next_k_path(k), true);
    assert_eq!(zipper.path(), b"5:erroni");
    // Mid-word stop: no trailing ':' because the symbol exceeds k bytes.
    assert_ne!(zipper.path().last(), Some(&b':'));
    for want in [&b"5:error:"[..], b"5:hello:", b"5:mucky:", b"5:roger:", b"5:zebra:"] {
        assert_eq!(zipper.to_next_k_path(k), true);
        assert_eq!(zipper.path(), want);
    }
    // Exhausted: the focus falls back to the k-path origin, structure intact.
    assert_eq!(zipper.to_next_k_path(k), false);
    assert_eq!(zipper.path(), b"5:");
    assert_eq!(zipper.child_count(), 6);
}
/// Number of keys generated for `k_path_test2`.
const K_PATH_TEST2_COUNT: usize = 50;
/// Generates `K_PATH_TEST2_COUNT` deterministic keys for `k_path_test2`.
/// Key `i` has length `(i % 15) + 5` (always at least 5 bytes) and byte `j`
/// equal to `(j + i) % 255`, so every key starts with byte `i % 255`.
pub fn k_path_test2_paths() -> Vec<Vec<u8>> {
    // Ranges are already iterators; both old `.into_iter()` calls were redundant.
    (0..K_PATH_TEST2_COUNT).map(|i| {
        let len = (i % 15) + 5;
        (0..len).map(|j| ((j + i) % 255) as u8).collect()
    }).collect()
}
/// Each generated key begins with a distinct first byte (`i % 255` for
/// `i < 50`), so sweeping 5-byte k-paths must count one position per key.
pub fn k_path_test2<'a, Z: ZipperIteration>(mut zipper: Z) {
    zipper.descend_first_k_path(5);
    let mut visited = 1;
    while zipper.to_next_k_path(5) {
        visited += 1;
    }
    assert_eq!(visited, K_PATH_TEST2_COUNT);
}
pub const K_PATH_TEST3_KEYS: &[&[u8]] = &[b":1a1A", b":1a1B", b":1a1C", b":1b1A", b":1b1B", b":1b1C", b":1c1A"];
/// k-path iteration at two symbol levels (zipper rooted at b":"). Covers plain
/// sibling stepping, stepping from a deeper anchor, widening `k` mid-iteration,
/// and mixing k-path steps with manual descend/ascend.
pub fn k_path_test3<'a, Z: ZipperIteration>(mut zipper: Z) {
    // Assert a `to_next_k_path` step succeeds/fails and lands on `want`.
    let next = |z: &mut Z, k: usize, ok: bool, want: &[u8]| {
        assert_eq!(z.to_next_k_path(k), ok);
        assert_eq!(z.path(), want);
    };
    // Assert a `descend_first_k_path` step succeeds and lands on `want`.
    let first = |z: &mut Z, k: usize, want: &[u8]| {
        assert_eq!(z.descend_first_k_path(k), true);
        assert_eq!(z.path(), want);
    };

    // Step through the first symbol level: a, b, c, then fall back to "1".
    zipper.descend_to(b"1");
    assert_eq!(zipper.path_exists(), true);
    first(&mut zipper, 1, b"1a");
    next(&mut zipper, 1, true, b"1b");
    next(&mut zipper, 1, true, b"1c");
    next(&mut zipper, 1, false, b"1");

    // Step through the second symbol level from the deeper anchor "1a1".
    zipper.reset();
    zipper.descend_to(b"1a1");
    assert!(zipper.path_exists());
    first(&mut zipper, 1, b"1a1A");
    next(&mut zipper, 1, true, b"1a1B");
    next(&mut zipper, 1, true, b"1a1C");
    next(&mut zipper, 1, false, b"1a1");

    // Widen k mid-iteration: a 2-path from "1a" spans "1A".."1C"; once it is
    // exhausted, 1-path iteration resumes among "1a"'s siblings.
    zipper.reset();
    zipper.descend_to(b"1");
    assert!(zipper.path_exists());
    first(&mut zipper, 1, b"1a");
    first(&mut zipper, 2, b"1a1A");
    next(&mut zipper, 2, true, b"1a1B");
    next(&mut zipper, 2, true, b"1a1C");
    next(&mut zipper, 2, false, b"1a");
    next(&mut zipper, 1, true, b"1b");
    next(&mut zipper, 1, true, b"1c");
    next(&mut zipper, 1, false, b"1");

    // Manual descend/ascend interleaved with k-path stepping.
    zipper.reset();
    zipper.descend_to(b"1");
    assert!(zipper.path_exists());
    first(&mut zipper, 1, b"1a");
    assert_eq!(zipper.descend_indexed_byte(0), true);
    assert_eq!(zipper.path(), b"1a1");
    first(&mut zipper, 1, b"1a1A");
    next(&mut zipper, 1, true, b"1a1B");
    next(&mut zipper, 1, true, b"1a1C");
    next(&mut zipper, 1, false, b"1a1");
    assert_eq!(zipper.ascend(1), true);
    assert_eq!(zipper.path(), b"1a");
    next(&mut zipper, 1, true, b"1b");
    next(&mut zipper, 1, true, b"1c");
    next(&mut zipper, 1, false, b"1");
}
// Fifty fixed pseudo-random byte keys (lengths 5..=19) for `k_path_test4`.
// The test below asserts one 5-byte k-path position per key, which relies on
// these keys having pairwise-distinct 5-byte prefixes.
pub const K_PATH_TEST4_KEYS: &[&[u8]] = &[
&[100, 74, 37, 218, 90, 211, 23, 84, 226, 59, 193, 236],
&[199, 102, 166, 28, 234, 168, 198, 13],
&[101, 241, 88, 163, 2, 9, 37, 110, 53, 201, 251, 164, 23, 162, 216],
&[237, 8, 108, 15, 63, 3, 249, 78, 200, 154, 103, 191],
&[106, 30, 34, 182, 157, 102, 126, 90, 200, 5, 93, 0, 163, 245, 112],
&[188, 177, 13, 5, 50, 66, 169, 113, 157, 202, 72, 11, 79, 73],
&[250, 96, 103, 31, 32, 104],
&[100, 152, 199, 46, 48, 252, 139, 150, 158, 8, 57, 50, 123],
&[65, 16, 128, 207, 27, 252, 145, 123, 105, 238, 230],
&[244, 34, 40, 224, 11, 125, 102],
&[116, 63, 105, 214, 137, 86, 202],
&[63, 70, 201, 21, 131, 60],
&[139, 209, 149, 73, 172, 12, 139, 80, 184, 105],
&[253, 235, 49, 156, 40, 50, 60, 73, 145, 249],
&[228, 81, 220, 29, 208, 234, 27],
&[116, 109, 134, 122, 15, 78, 126, 240, 158, 42, 221, 229, 93, 200, 194],
&[180, 216, 189, 14, 82, 14, 170, 195, 196, 42, 177, 144, 153, 156, 140, 109, 93, 78, 157],
&[190, 6, 59, 69, 208, 253, 2, 33, 86],
&[245, 168, 144, 122, 243, 111],
&[123, 150, 249, 114, 32, 140, 186, 204, 199, 8, 205, 150, 34, 104, 186, 236],
&[8, 29, 191, 189, 72, 101, 39, 24, 105, 44, 13, 87, 75, 187],
&[14, 201, 29, 151, 113, 10, 175],
&[83, 130, 247, 5, 250, 101, 141, 5, 42, 132, 205, 3, 118, 152, 33, 219, 1, 91, 204],
&[207, 215, 38, 17, 244, 96],
&[34, 132, 138, 222, 250, 162, 231, 68, 142, 162, 152, 172, 244, 102, 179, 111, 161, 95],
&[124, 120, 11, 4, 219, 210, 172, 50, 182, 160, 86, 88, 136, 122, 97, 98],
&[86, 74, 181, 17, 3, 173, 12],
&[18, 234, 66, 134, 20],
&[20, 24, 83, 219, 209, 20, 236, 128, 155, 15, 110, 54, 237, 105, 186, 62],
&[67, 11, 50, 124, 120, 33, 218],
&[89, 248, 169, 97, 245, 98, 230, 53, 114, 198, 227, 148, 22, 127, 198, 153, 238, 59, 223],
&[100, 128, 38, 54, 171, 186, 9, 133, 191, 82, 113, 86, 10, 72, 236, 124, 201, 65],
&[152, 115, 99, 124, 81, 254, 0, 179, 24, 87, 24, 77, 60],
&[107, 117, 222, 38, 162, 193, 48, 44, 140, 162, 104, 139, 90],
&[63, 29, 217, 85, 63, 130, 110, 121, 227, 43, 215, 223, 249, 1, 72, 134, 92, 188],
&[117, 3, 144, 15, 103, 113, 130, 253, 0, 102, 47, 24, 234, 0, 159],
&[38, 60, 197, 120, 53, 94, 202, 137, 116, 27, 12, 181],
&[248, 41, 252, 254, 98, 173, 42, 92, 30, 65, 72],
&[240, 147, 89, 110, 224, 8],
&[199, 86, 108, 195, 62, 169, 61],
&[93, 225, 21, 185, 91, 23, 19, 7, 108, 176, 191, 91],
&[70, 10, 122, 77, 171],
&[32, 161, 24, 162, 112, 152, 21, 226, 149, 253, 212, 246, 175, 182],
&[99, 7, 213, 87, 192, 2, 110, 242, 222, 89, 20, 83, 138, 112],
&[92, 64, 61, 35, 111, 41, 151, 121, 24, 157],
&[115, 201, 114, 124, 135, 246, 93, 230, 210, 164, 213, 254, 108, 181, 77, 19, 103, 166],
&[26, 231, 59, 238, 246],
&[52, 74, 93, 202, 140, 11, 56, 46, 211, 194, 137, 65, 36, 90, 209],
&[56, 245, 179, 40, 190, 168, 116, 115],
&[192, 215, 69, 171, 218, 187, 202, 120, 92, 33, 14, 77, 34, 46, 40, 93, 135, 117, 152],
];
/// Sweeps 5-byte k-paths over `K_PATH_TEST4_KEYS` and expects exactly one
/// position per key (the keys are chosen with distinct 5-byte prefixes).
pub fn k_path_test4<'a, Z: ZipperIteration>(mut zipper: Z) {
    zipper.descend_first_k_path(5);
    let mut visited = 1;
    while zipper.to_next_k_path(5) {
        visited += 1;
    }
    assert_eq!(visited, K_PATH_TEST4_KEYS.len());
}
pub const K_PATH_TEST5_KEYS: &[&[u8]] = &[
    &[3, 193, 4, 194, 1, 43, 3, 193, 8, 194, 1, 45, 194, 1, 46],
    &[3, 193, 4, 194, 1, 43, 3, 193, 34, 193],
];
/// A 2-byte k-path below a 13-byte stem that only the first key extends: it
/// must reach that key's tail `[1, 46]` once and then fall back to the stem.
pub fn k_path_test5<'a, Z: ZipperIteration>(mut zipper: Z) {
    let stem: &[u8] = &[3, 193, 4, 194, 1, 43, 3, 193, 8, 194, 1, 45, 194];
    let tail: &[u8] = &[1, 46];
    zipper.descend_to(stem);
    assert!(zipper.path_exists());
    assert_eq!(zipper.descend_first_k_path(2), true);
    assert_eq!(zipper.path(), &[stem, tail].concat()[..]);
    assert_eq!(zipper.to_next_k_path(2), false);
    assert_eq!(zipper.path(), stem);
}
// Two long keys sharing a 17-byte prefix; they diverge at byte index 17
// (75 vs 84). Used by `k_path_test6` below.
pub const K_PATH_TEST6_KEYS: &[&[u8]] = &[
&[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75, 192, 3, 193, 84, 192, 3, 193, 75, 128, 131, 193, 49],
&[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 84, 3, 193, 75, 192, 192, 3, 193, 75, 128, 131, 193, 49],
];
// Runs the same k-path walk three times, each time with a different movement
// strategy (bulk descend_to / per-byte descend_to_byte / descend_first_byte,
// and bulk ascend / per-byte ascend_byte), asserting identical outcomes.
pub fn k_path_test6<'a, Z: ZipperIteration>(mut zipper: Z) {
// The shared walk, parameterized over how to descend and ascend.
fn test_loop<'a, Z: ZipperMoving + ZipperIteration, AscendF: Fn(&mut Z, usize), DescendF: Fn(&mut Z, &[u8])>(zipper: &mut Z, descend_f: DescendF, ascend_f: AscendF) {
zipper.reset();
descend_f(zipper, &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193]);
assert!(zipper.descend_first_k_path(1));
assert_eq!(zipper.path(), &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75]);
descend_f(zipper, &[192, 3, 193, 84, 192, 3, 193]);
assert!(zipper.descend_first_k_path(1));
assert_eq!(zipper.path(), &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75, 192, 3, 193, 84, 192, 3, 193, 75]);
descend_f(zipper, &[128, 131, 193]);
assert!(zipper.descend_first_k_path(1));
assert_eq!(zipper.path(), &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75, 192, 3, 193, 84, 192, 3, 193, 75, 128, 131, 193, 49]);
// No sibling at the leaf byte: the failed step ascends one byte.
assert!(!zipper.to_next_k_path(1));
assert_eq!(zipper.path(), &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75, 192, 3, 193, 84, 192, 3, 193]);
ascend_f(zipper, 3);
assert!(!zipper.to_next_k_path(1));
assert_eq!(zipper.path(), &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193]);
ascend_f(zipper, 7);
// Back at the divergence point; the sibling byte 84 must be reachable.
assert!(zipper.to_next_k_path(1));
assert_eq!(zipper.path(), &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 84]);
ascend_f(zipper, 17);
}
// Strategy 1: bulk descend_to + bulk ascend.
test_loop(&mut zipper,
|zipper, path| {
zipper.descend_to(path);
assert!(zipper.path_exists());
},
|zipper, steps| assert!(zipper.ascend(steps)),
);
// Strategy 2: per-byte descend_to_byte + per-byte ascend_byte.
test_loop(&mut zipper,
|zipper, path| {
for byte in path {
zipper.descend_to_byte(*byte);
assert!(zipper.path_exists());
}
},
|zipper, steps| {
for _ in 0..steps {
assert!(zipper.ascend_byte())
}
},
);
// Strategy 3: descend_first_byte (valid here because each descended span is
// a single-child chain) + per-byte ascend_byte.
test_loop(&mut zipper,
|zipper, path| {
for _ in 0..path.len() {
assert!(zipper.descend_first_byte());
}
},
|zipper, steps| {
for _ in 0..steps {
assert!(zipper.ascend_byte())
}
},
);
}
pub const K_PATH_TEST7_KEYS: &[&[u8]] = &[
    &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75, 192, 3, 193, 84, 192, 3, 193, 75, 128, 131, 193, 49],
    &[2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 84, 3, 193, 75, 192, 192, 3, 193, 75, 128, 131, 193, 49],
];
/// Descends `keys[0]` one byte at a time via 1-paths, then unwinds; the only
/// sibling along the way is at byte index 17, where the keys diverge (75 vs 84).
pub fn k_path_test7<'a, Z: ZipperIteration>(mut zipper: Z) {
    let key = K_PATH_TEST7_KEYS[0];
    for depth in 0..key.len() {
        assert_eq!(zipper.path(), &key[..depth]);
        assert!(zipper.descend_first_k_path(1));
    }
    for depth in (0..key.len()).rev() {
        assert_eq!(zipper.path(), &key[..=depth]);
        if depth == 17 {
            // The sibling branch (byte 84) exists here, and it in turn has none.
            assert!(zipper.to_next_k_path(1));
            assert!(!zipper.to_next_k_path(1));
        } else {
            assert!(!zipper.to_next_k_path(1));
        }
    }
}
pub const K_PATH_TEST8_KEYS: &[&[u8]] = &[b"ABCDEFGHIJKLMNOPQRSTUVWXYZ", b"ab",];
/// Below 'A' there is exactly one 1-path position ('B', no siblings); after
/// the failed sibling step the focus must return to "A".
pub fn k_path_test8<'a, Z: ZipperIteration>(mut zipper: Z) {
    zipper.reset();
    zipper.descend_to_byte(b'A');
    assert!(zipper.path_exists());
    assert!(zipper.descend_first_k_path(1));
    assert_eq!(zipper.path(), b"AB");
    assert!(!zipper.to_next_k_path(1));
    assert_eq!(zipper.path(), b"A");
}
pub const K_PATH_TEST9_KEYS: &[&[u8]] = &[
    &[2, 194, 1, 1, 193, 5],
    &[3, 194, 1, 0, 193, 6, 193, 5],
    &[3, 193, 4, 193],
];
/// With the zipper rooted at `[2, 194]` (see the macro invocation), the only
/// byte below the root is `1`; a 1-path visits it once, then falls back to the
/// (empty) root path.
pub fn k_path_test9<'a, Z: ZipperIteration>(mut zipper: Z) {
    zipper.reset();
    assert!(zipper.descend_first_k_path(1));
    assert_eq!(zipper.path(), &[1]);
    assert!(!zipper.to_next_k_path(1));
    assert_eq!(zipper.path(), &[]);
}
}
#[cfg(test)]
mod tests {
use crate::{alloc::global_alloc, PathMap};
use super::*;
// Instantiate the shared moving-test battery for `ReadZipperUntracked`
// borrowed from a `PathMap`.
super::zipper_moving_tests::zipper_moving_tests!(read_zipper,
|keys: &[&[u8]]| {
let mut btm = PathMap::new();
keys.iter().for_each(|k| { btm.set_val_at(k, ()); });
btm
},
|btm: &mut PathMap<()>, path: &[u8]| -> ReadZipperUntracked<()> {
btm.read_zipper_at_path(path)
});
// Same store setup, iteration-test battery for `ReadZipperUntracked`.
super::zipper_iteration_tests::zipper_iteration_tests!(read_zipper,
|keys: &[&[u8]]| {
let mut btm = PathMap::new();
keys.iter().for_each(|k| { btm.set_val_at(k, ()); });
btm
},
|btm: &mut PathMap<()>, path: &[u8]| -> ReadZipperUntracked<()> {
btm.read_zipper_at_path(path)
});
// Moving-test battery for `ReadZipperOwned`, which consumes the map;
// `mem::take` leaves an empty map behind in the store slot.
super::zipper_moving_tests::zipper_moving_tests!(read_zipper_owned,
|keys: &[&[u8]]| {
let mut btm = PathMap::new();
keys.iter().for_each(|k| { btm.set_val_at(k, ()); });
btm
},
|btm: &mut PathMap<()>, path: &[u8]| -> ReadZipperOwned<()> {
core::mem::take(btm).into_read_zipper(path)
});
// Iteration-test battery for `ReadZipperOwned`.
super::zipper_iteration_tests::zipper_iteration_tests!(read_zipper_owned,
|keys: &[&[u8]]| {
let mut btm = PathMap::new();
keys.iter().for_each(|k| { btm.set_val_at(k, ()); });
btm
},
|btm: &mut PathMap<()>, path: &[u8]| -> ReadZipperOwned<()> {
core::mem::take(btm).into_read_zipper(path)
});
/// Values must be readable from zippers rooted both above a value-bearing path
/// and exactly on one, and `descend_until` must stop at the next value.
#[test]
fn zipper_value_access() {
    let mut btm = PathMap::new();
    let rs = ["arrow", "bow", "cannon", "roman", "romane", "romanus", "romulus", "rubens", "ruber", "rubicon", "rubicundus", "rom'i"];
    for r in rs.iter() {
        btm.set_val_at(r.as_bytes(), *r);
    }

    // Zipper rooted above any value ("ro").
    let root_key = b"ro";
    let mut zipper = ReadZipperCore::new_with_node_and_path_in(btm.root().unwrap(), false, root_key, root_key.len(), 0, None, global_alloc());
    assert!(!zipper.is_val());
    zipper.descend_to(b"mulus");
    assert!(zipper.is_val());
    assert_eq!(zipper.val(), Some(&"romulus"));

    // Zipper rooted directly on a value ("roman").
    let root_key = b"roman";
    let mut zipper = ReadZipperCore::new_with_node_and_path_in(btm.root().unwrap(), false, root_key, root_key.len(), 0, None, global_alloc());
    assert!(zipper.is_val());
    assert_eq!(zipper.val(), Some(&"roman"));
    zipper.descend_to(b"e");
    assert!(zipper.is_val());
    assert_eq!(zipper.val(), Some(&"romane"));
    assert!(zipper.ascend(1));
    // "romanu" is an interior position with no value of its own.
    zipper.descend_to(b"u");
    assert!(!zipper.is_val());
    assert_eq!(zipper.val(), None);
    zipper.descend_until();
    assert!(zipper.is_val());
    assert_eq!(zipper.val(), Some(&"romanus"));
}
/// Forked sub-zipper paths are relative to the fork point ("rub"), while
/// iterating the parent zipper yields paths relative to its own root.
#[test]
fn read_zipper_special_iter_test1() {
    let mut btm = PathMap::new();
    let rs = ["arrow", "bow", "cannon", "roman", "romane", "romanus", "romulus", "rubens", "ruber", "rubicon", "rubicundus", "rom'i"];
    for (i, r) in rs.iter().enumerate() {
        btm.set_val_at(r.as_bytes(), i);
    }
    let mut zipper = btm.read_zipper();
    zipper.descend_to(b"rub");
    let mut sub_zipper = zipper.fork_read_zipper();
    // Sub-zipper paths omit the 3-byte "rub" prefix of the fork point.
    while let Some(&v) = sub_zipper.to_next_get_val() {
        assert_eq!(&rs[v].as_bytes()[3..], sub_zipper.path());
    }
    drop(sub_zipper);
    // The parent zipper iterates full keys (relative to its root).
    for (p, &v) in zipper {
        assert_eq!(rs[v].as_bytes(), p);
    }
}
/// `to_next_val` on an empty map must keep returning false — including after a
/// zipper head created (and dropped) a write zipper without adding any value.
#[test]
fn read_zipper_special_iter_test2() {
    let mut map = PathMap::<u64>::new();
    {
        let mut rz = map.read_zipper();
        assert!(!rz.to_next_val());
        // A second call must also report exhaustion, not loop back.
        assert!(!rz.to_next_val());
    }
    let map_head = map.zipper_head();
    let _wz = map_head.write_zipper_at_exclusive_path(b"0");
    drop(_wz);
    drop(map_head);
    let mut rz = map.read_zipper();
    assert!(!rz.to_next_val());
    assert!(!rz.to_next_val());
}
/// Writes N values under "in", then checks `to_next_get_val` agrees with
/// `get_val` and visits the keys (big-endian index bytes) in order.
#[test]
fn read_zipper_special_iter_test3() {
    const N: usize = 32;
    let mut map = PathMap::<usize>::new();
    {
        let mut wz = map.write_zipper_at_path(b"in");
        for i in 0usize..N {
            wz.descend_to(i.to_be_bytes());
            wz.set_val(i);
            wz.reset();
        }
    }
    let mut reader_z = map.read_zipper_at_path(b"in");
    assert_eq!(reader_z.val_count(), N);
    let mut expected = 0;
    while let Some(val) = reader_z.to_next_get_val() {
        // The value returned by the step matches a fresh focus read.
        assert_eq!(reader_z.get_val(), Some(val));
        assert_eq!(reader_z.get_val(), Some(&expected));
        assert_eq!(reader_z.path(), expected.to_be_bytes());
        expected += 1;
    }
    assert_eq!(expected, N);
}
/// `val_count` and full iteration must agree (9 of the 12 keys start with 'r')
/// for a borrowed-path read zipper, a write zipper, and a fork of it.
#[test]
fn read_zipper_special_iter_test4() {
    const R_KEY_CNT: usize = 9;
    let keys = ["arrow", "bow", "cannon", "roman", "romane", "romanus", "romulus", "rubens", "ruber", "rubicon", "rubicundus", "rom'i"];
    let mut map: PathMap::<()> = keys.into_iter().map(|k| (k, ())).collect();

    let rz = map.read_zipper_at_borrowed_path(b"r");
    assert_eq!(rz.val_count(), R_KEY_CNT);
    // Counting via iteration must match val_count.
    assert_eq!(rz.into_iter().count(), R_KEY_CNT);

    let wz = map.write_zipper_at_path(b"r");
    assert_eq!(wz.val_count(), R_KEY_CNT);
    let rz = wz.fork_read_zipper();
    assert_eq!(rz.val_count(), R_KEY_CNT);
    assert_eq!(rz.into_iter().count(), R_KEY_CNT);
}
/// A read zipper rooted below a one-byte branch (`"in" + 0x01`) must see
/// exactly the single value stored under that branch.
#[test]
fn read_zipper_special_zipper_iter_test5() {
    let mut map = PathMap::<usize>::new();
    {
        let mut wz = map.write_zipper_at_path(b"in");
        for i in 0usize..2 {
            // Key layout: one discriminating byte, then the index bytes.
            wz.descend_to_byte(i as u8);
            wz.descend_to(i.to_be_bytes());
            wz.set_val(i);
            wz.reset();
        }
    }
    let mut reader_z = map.read_zipper_at_path([b'i', b'n', 1]);
    let mut hits = 0;
    while reader_z.to_next_val() {
        hits += 1;
    }
    assert_eq!(hits, 1);
}
/// Byte-level stepping on a forked zipper: sibling stepping, child masks, and
/// `origin_path` vs `path` for keys [0,3], [0,4], [0,5].
#[test]
fn read_zipper_special_byte_iter_test() {
    let keys = vec![[0, 3], [0, 4], [0, 5]];
    let map: PathMap<()> = keys.into_iter().map(|v| (v, ())).collect();
    let mut r0 = map.read_zipper();
    r0.descend_to_byte(0);
    assert!(r0.path_exists());
    let mut r1 = r0.fork_read_zipper();
    // The fork root itself has no siblings.
    assert!(!r1.to_next_sibling_byte());
    // Child bytes 3, 4, 5 all land in the first mask word.
    assert_eq!(r1.child_mask().0[0], (1<<3) | (1<<4) | (1<<5));
    r1.descend_to_byte(3);
    assert!(r1.path_exists());
    assert_eq!(r1.child_mask().0[0], 0);
    assert!(r1.to_next_sibling_byte());
    // origin_path includes the parent byte; path is relative to the fork.
    assert_eq!(r1.origin_path(), &[0, 4]);
    assert_eq!(r1.path(), &[4]);
    assert!(r1.to_next_sibling_byte());
    assert!(!r1.to_next_sibling_byte());
}
// Checks that a zipper's `origin_path` always equals its root prefix plus its
// relative `path`, using a small family-relation fact base.
// NOTE(review): the "\u{FFFD}" (�) characters in these literals look like a
// mojibake artifact of some lost multi-byte separator. The test is
// self-consistent either way, but confirm the intended byte sequence.
#[test]
fn path_concat_test() {
let parent_path = "�parent";
let exprs = [
"�parent�Tom�Bob",
"�parent�Pam�Bob",
"�parent�Tom�Liz",
"�parent�Bob�Ann",
"�parent�Bob�Pat",
"�parent�Pat�Jim",
"�female�Pam",
"�male�Tom",
"�male�Bob",
"�female�Liz",
"�female�Pat",
"�female�Ann",
"�male�Jim",
];
let family: PathMap<i32> = exprs.iter().enumerate().map(|(i, k)| (k, i as i32)).collect();
let mut parent_zipper = family.read_zipper_at_path(parent_path.as_bytes());
assert!(family.path_exists_at(parent_path));
// At the root the relative path is empty, so prefix + path == prefix.
let mut full_parent_path = parent_path.as_bytes().to_vec();
full_parent_path.extend(parent_zipper.path());
assert!(family.path_exists_at(&full_parent_path));
// At every value, prefix + relative path must equal origin_path and be a key.
while parent_zipper.to_next_val() {
let mut full_parent_path = parent_path.as_bytes().to_vec();
full_parent_path.extend(parent_zipper.path());
assert!(family.contains(&full_parent_path));
assert_eq!(full_parent_path, parent_zipper.origin_path());
}
}
/// `origin_path` must always equal the zipper's root prefix ("roma") plus its
/// relative `path`, across descent and value iteration.
#[test]
fn full_path_test() {
    let rs = ["arrow", "bow", "cannon", "roman", "romane", "romanus", "romulus", "rubens", "ruber", "rubicon", "rubicundus", "rom'i"];
    let btm: PathMap<u64> = rs.into_iter().enumerate().map(|(i, k)| (k, i as u64)).collect();
    let mut zipper = btm.read_zipper_at_path(b"roma");
    assert_eq!(format!("roma{}", std::str::from_utf8(zipper.path()).unwrap()), "roma");
    assert_eq!(std::str::from_utf8(zipper.origin_path()).unwrap(), "roma");
    zipper.descend_to(b"n");
    assert_eq!(format!("roma{}", std::str::from_utf8(zipper.path()).unwrap()), "roman");
    assert_eq!(std::str::from_utf8(zipper.origin_path()).unwrap(), "roman");
    for want in ["romane", "romanus"] {
        zipper.to_next_val();
        assert_eq!(format!("roma{}", std::str::from_utf8(zipper.path()).unwrap()), want);
        assert_eq!(std::str::from_utf8(zipper.origin_path()).unwrap(), want);
    }
    // No more values under "roma…" ("romulus" is outside this root).
    zipper.to_next_val();
    assert_eq!(zipper.path().len(), 0);
}
/// Grafting the same map under many keys shares subtries; a full walk must
/// report one shared node per l0 graft plus one per (l0, l1) graft.
#[test]
fn read_zipper_is_shared_test1() {
    let l0_keys = vec!["stem0", "stem1", "stem2", "strongbad", "strange", "steam", "stevador", "steeple"];
    let l1_keys = vec!["A-mid0", "B-mid1", "C-mid2", "D-midlands", "D-middling", "D-middlemarch"];
    let l2_keys = vec!["X-top0", "X-top1", "X-top2", "X-top3"];
    let top_map: PathMap<()> = l2_keys.iter().map(|v| (v, ())).collect();

    // Graft `top_map` under every mid-level key.
    let mut mid_map = PathMap::<()>::new();
    {
        let mut wz = mid_map.write_zipper();
        for key in l1_keys.iter() {
            wz.reset();
            wz.descend_to(key);
            wz.graft_map(top_map.clone());
        }
    }
    // Graft `mid_map` under every bottom-level key.
    let mut map = PathMap::<()>::new();
    {
        let mut wz = map.write_zipper();
        for key in l0_keys.iter() {
            wz.reset();
            wz.descend_to(key);
            wz.graft_map(mid_map.clone());
        }
    }
    assert_eq!(map.val_count(), l0_keys.len() * l1_keys.len() * l2_keys.len());

    // Walk everything, counting the steps that sit on shared nodes.
    let mut rz = map.read_zipper();
    let mut shared_cnt = 0;
    while rz.to_next_step() {
        shared_cnt += usize::from(rz.is_shared());
    }
    assert_eq!(shared_cnt, l0_keys.len() + l0_keys.len() * l1_keys.len());
}
/// Node sharing when one graft point ("steamboat") extends the other
/// ("steam"); the walk is expected to report exactly 3 shared steps.
#[test]
fn read_zipper_is_shared_test2() {
    let l0_keys = vec!["steam", "steamboat"];
    let l1_keys = vec!["X0", "X1", "X2"];
    let top_map: PathMap<()> = l1_keys.iter().map(|v| (v, ())).collect();
    let mut map = PathMap::<()>::new();
    {
        let mut wz = map.write_zipper();
        for key in l0_keys.iter() {
            wz.reset();
            wz.descend_to(key);
            wz.graft_map(top_map.clone());
        }
    }
    assert_eq!(map.val_count(), l0_keys.len() * l1_keys.len());
    let mut rz = map.read_zipper();
    let mut shared_cnt = 0;
    while rz.to_next_step() {
        shared_cnt += usize::from(rz.is_shared());
    }
    assert_eq!(shared_cnt, 3);
}
/// `try_borrow_focus` / `get_focus` must expose the focus node when the focus
/// sits on a node boundary — for a plain read zipper, a borrowed-path zipper,
/// and a zipper obtained through a zipper head.
#[test]
fn read_zipper_focus_nodes() {
    let mut map = PathMap::<()>::new();
    for key in [&b"one.val"[..], b"one.two.val", b"one.two.three.val", b"one.two.three.four.val"] {
        map.set_val_at(key, ());
    }

    // Plain read zipper, descended to a boundary: children 't' and 'v'.
    let mut rz = map.read_zipper();
    rz.descend_to(b"one.");
    let node = rz.try_borrow_focus().unwrap();
    assert_eq!(node.as_tagged().count_branches(&[]), 2);
    let expected_mask: ByteMask = [b't', b'v'].into_iter().collect();
    assert_eq!(node.as_tagged().node_branches_mask(&[]), expected_mask);
    assert!(matches!(rz.get_focus(), AbstractNodeRef::BorrowedRc(_)));
    drop(rz);

    // Same checks with the zipper created directly at the boundary.
    let rz = map.read_zipper_at_borrowed_path(b"one.two.");
    let node = rz.try_borrow_focus().unwrap();
    assert_eq!(node.as_tagged().count_branches(&[]), 2);
    let expected_mask: ByteMask = [b't', b'v'].into_iter().collect();
    assert_eq!(node.as_tagged().node_branches_mask(&[]), expected_mask);
    assert!(matches!(rz.get_focus(), AbstractNodeRef::BorrowedRc(_)));
    drop(rz);

    // Same checks through a zipper head: children 'f' and 'v' at this depth.
    let zh = map.zipper_head();
    let rz = zh.read_zipper_at_borrowed_path(b"one.two.three.").unwrap();
    let node = rz.try_borrow_focus().unwrap();
    assert_eq!(node.as_tagged().count_branches(&[]), 2);
    let expected_mask: ByteMask = [b'f', b'v'].into_iter().collect();
    assert_eq!(node.as_tagged().node_branches_mask(&[]), expected_mask);
    assert!(matches!(rz.get_focus(), AbstractNodeRef::BorrowedRc(_)));
}
/// Keys for `read_zipper_val_count_test1`: a value at the root (empty key)
/// plus one non-empty key.
const ZIPPER_VAL_COUNT_TEST1_KEYS: &[&[u8]] = &[b"", b"arrow"];
/// `val_count` must include a value stored at the current focus: 2 at the
/// root (root value + "arrow"), 1 after descending onto "arrow".
#[test]
fn read_zipper_val_count_test1() {
    // `.iter().copied()` replaces `.into_iter().cloned()`: calling `into_iter`
    // on a slice reference iterates by reference anyway, and the `&[u8]` items
    // are `Copy`, so `copied` is the idiomatic form.
    let map: PathMap<()> = ZIPPER_VAL_COUNT_TEST1_KEYS.iter().copied().collect();
    let mut zipper = map.read_zipper();
    assert_eq!(zipper.path(), b"");
    assert_eq!(zipper.val_count(), 2);
    assert_eq!(zipper.descend_until(), true);
    assert_eq!(zipper.path(), b"arrow");
    assert_eq!(zipper.val_count(), 1);
}
/// `descend_last_path` must land on the lexicographically greatest key.
#[test]
fn descend_last_path() {
    let rs = ["arrow", "bow", "cannon", "roman", "romane", "romanus", "romulus", "rubens", "ruber", "rubicon", "rubicundus", "rom'i"];
    let map: PathMap<u64> = rs.into_iter().enumerate().map(|(i, k)| (k, i as u64)).collect();
    let mut rz = map.read_zipper();
    assert!(rz.descend_last_path());
    // "rubicundus" sorts after every other key in `rs`.
    assert_eq!(rz.path(), b"rubicundus");
}
}