use core::slice;
use core::mem::MaybeUninit;
use crate::alloc::{global_alloc, Allocator, GlobalAlloc};
use crate::utils::ByteMask;
use crate::PathMap;
use crate::trie_node::*;
use crate::zipper::*;
use crate::zipper::read_zipper_core::*;
use crate::zipper::zipper_priv::*;
/// A borrowed, read-only handle to a location inside a trie.
///
/// Internally this is one of three states, discriminated through `val_or_key`:
/// * a focus node plus a short residual key (`node_key`) not yet consumed
///   descending into a child node,
/// * a focus node whose key is fully consumed, with the value reference at
///   that location cached (tagged `VAL_SENTINEL`),
/// * an invalid ref (tagged `BAD_SENTINEL`), produced when the requested
///   location cannot be represented.
pub struct TrieRefBorrowed<'a, V: Clone + Send + Sync, A: Allocator = GlobalAlloc> {
    /// The node the ref is focused on; `None` only for invalid refs
    focus_node: Option<&'a TrieNodeODRc<V, A>>,
    /// Tagged union: residual key bytes or a cached value reference
    val_or_key: ValRefOrKey<'a, V>,
    alloc: A
}
impl<V: Clone + Send + Sync> Default for TrieRefBorrowed<'_, V> {
    /// The default ref is invalid: no focus node, `BAD_SENTINEL` tag set
    fn default() -> Self {
        Self::new_invalid_in(global_alloc())
    }
}
/// Tagged union discriminated by its first byte:
/// * `1..=MAX_NODE_KEY_BYTES` — `node_key` is active; the byte is the key length
/// * `0xFF` — `val_ref` is active (first byte of `VAL_SENTINEL`)
/// * `0xFE` — invalid ref (first byte of `BAD_SENTINEL`)
///
/// NOTE(review): discriminating a `u64` sentinel through `node_key.0` assumes
/// the first in-memory byte of the `u64` equals `0xFF`/`0xFE`. That holds here
/// because both sentinels repeat a single byte, but it also relies on the two
/// tuple fields starting at offset 0 inside the `#[repr(C)]` union even though
/// tuples themselves use the default layout — confirm this invariant is intended.
#[repr(C)]
union ValRefOrKey<'a, V> {
    node_key: (u8, [MaybeUninit<u8>; MAX_NODE_KEY_BYTES]),
    val_ref: (u64, Option<&'a V>)
}
/// Tag meaning "the union holds a value reference"
const VAL_SENTINEL: u64 = 0xFFFFFFFFFFFFFFFF;
/// Tag meaning "this ref is invalid"
const BAD_SENTINEL: u64 = 0xFEFEFEFEFEFEFEFE;
impl<V> Clone for ValRefOrKey<'_, V> {
    #[inline]
    fn clone(&self) -> Self {
        // Plain bit copy; both variants are bitwise-copyable
        *self
    }
}
impl<V> Copy for ValRefOrKey<'_, V> {}
impl<V: Clone + Send + Sync, A: Allocator> Clone for TrieRefBorrowed<'_, V, A> {
    #[inline]
    fn clone(&self) -> Self {
        // `focus_node` is a shared borrow and `val_or_key` is `Copy`;
        // only the allocator handle needs a real clone
        Self{
            focus_node: self.focus_node,
            val_or_key: self.val_or_key,
            alloc: self.alloc.clone(),
        }
    }
}
// When the allocator handle is `Copy`, the whole ref is a plain bit copy
impl<V: Clone + Send + Sync, A: Allocator + Copy> Copy for TrieRefBorrowed<'_, V, A> {}
impl<'a, V: Clone + Send + Sync + 'a, A: Allocator + 'a> TrieRefBorrowed<'a, V, A> {
    /// Makes a ref that answers "path doesn't exist" to every query
    pub(crate) fn new_invalid_in(alloc: A) -> Self {
        Self { focus_node: None, val_or_key: ValRefOrKey { val_ref: (BAD_SENTINEL, None) }, alloc }
    }
    /// Makes a ref focused at `path`, descending from `root_node`.
    ///
    /// `root_val` is the value at `root_node` itself; how it is consumed is
    /// delegated to `node_along_path` (presumably used when `path` resolves
    /// exactly at the root — confirm against that helper).
    pub(crate) fn new_with_node_and_path_in(root_node: &'a TrieNodeODRc<V, A>, root_val: Option<&'a V>, path: &[u8], alloc: A) -> Self {
        let (node, key, val) = node_along_path(root_node, path, root_val, false);
        let node_key_len = key.len();
        let val_or_key = if node_key_len > 0 && node_key_len <= MAX_NODE_KEY_BYTES {
            // Residual key bytes remain: stash them inline, tagged by length
            let mut node_key_bytes = [MaybeUninit::uninit(); MAX_NODE_KEY_BYTES];
            unsafe {
                // SAFETY: node_key_len <= MAX_NODE_KEY_BYTES, so the copy fits the buffer
                let src_ptr = key.as_ptr();
                let dst_ptr = node_key_bytes.as_mut_ptr().cast::<u8>();
                core::ptr::copy_nonoverlapping(src_ptr, dst_ptr, node_key_len);
            }
            ValRefOrKey { node_key: (node_key_len as u8, node_key_bytes) }
        } else {
            if node_key_len <= MAX_NODE_KEY_BYTES {
                // Key fully consumed (len == 0): cache the value reference
                ValRefOrKey { val_ref: (VAL_SENTINEL, val) }
            } else {
                // Residual key too long to store inline: mark the ref invalid
                ValRefOrKey { val_ref: (BAD_SENTINEL, None) }
            }
        };
        Self { focus_node: Some(node), val_or_key, alloc }
    }
    /// Concatenates `node_key` with as much of `path` as fits into an inline
    /// buffer, attempts to descend one child link, then resolves the rest via
    /// `new_with_node_and_path_in` from wherever the descent landed.
    pub(crate) fn new_with_key_and_path_in<'paths>(mut node: &'a TrieNodeODRc<V, A>, root_val: Option<&'a V>, node_key: &'paths [u8], mut path: &'paths [u8], alloc: A) -> Self {
        let mut temp_key_buf: [MaybeUninit<u8>; MAX_NODE_KEY_BYTES] = [MaybeUninit::uninit(); MAX_NODE_KEY_BYTES];
        let node_key_len = node_key.len();
        let path_len = path.len();
        if node_key_len > 0 && path_len > 0 {
            let next_node_path = unsafe {
                // SAFETY: assumes node_key_len <= MAX_NODE_KEY_BYTES (true for keys
                // produced by `node_key()` — TODO confirm for all callers);
                // `remaining_len` is clamped so the combined copy stays in-bounds
                let src_ptr = node_key.as_ptr();
                let dst_ptr = temp_key_buf.as_mut_ptr().cast::<u8>();
                core::ptr::copy_nonoverlapping(src_ptr, dst_ptr, node_key_len);
                let remaining_len = (MAX_NODE_KEY_BYTES - node_key_len).min(path_len);
                let src_ptr = path.as_ptr();
                let dst_ptr = temp_key_buf.as_mut_ptr().cast::<u8>().add(node_key_len);
                core::ptr::copy_nonoverlapping(src_ptr, dst_ptr, remaining_len);
                let total_buf_len = node_key_len + remaining_len;
                core::slice::from_raw_parts(temp_key_buf.as_mut_ptr().cast::<u8>(), total_buf_len)
            };
            if let Some((consumed_byte_cnt, next_node)) = node.as_tagged().node_get_child(next_node_path) {
                // Descended into a child; drop the bytes the child link consumed
                debug_assert!(consumed_byte_cnt >= node_key_len);
                node = next_node;
                path = &path[consumed_byte_cnt-node_key_len..];
            } else {
                // No child link: walk the combined key from the current node
                path = next_node_path;
            }
        } else {
            if path_len == 0 {
                // Nothing new to descend; re-resolve the existing residual key
                path = node_key;
            }
        }
        let (node, key, val) = node_along_path(node, path, root_val, true);
        TrieRefBorrowed::new_with_node_and_path_in(node, val, key, alloc)
    }
    /// False only when the union carries the `BAD_SENTINEL` tag (first byte 0xFE)
    #[inline]
    fn is_valid(&self) -> bool {
        (unsafe{ self.val_or_key.node_key.0 } != 0xFE)
    }
    /// Residual key bytes still to be resolved below `focus_node`; empty when
    /// the union holds a value (0xFF tag) or the ref is invalid (0xFE tag)
    #[inline]
    fn node_key(&self) -> &[u8] {
        let key_len = unsafe{ self.val_or_key.node_key.0 } as usize;
        if key_len > MAX_NODE_KEY_BYTES {
            // Sentinel byte, not a length: there is no residual key
            &[]
        } else {
            // SAFETY: the tag byte records how many leading bytes were initialized
            unsafe{ slice::from_raw_parts(self.val_or_key.node_key.1.as_ptr().cast(), key_len) }
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> Zipper for TrieRefBorrowed<'_, V, A> {
    /// True when the focus sits on a path that exists in the trie
    fn path_exists(&self) -> bool {
        if !self.is_valid() {
            return false;
        }
        let key = self.node_key();
        key.is_empty() || self.focus_node.unwrap().as_tagged().node_contains_partial_key(key)
    }
    /// True when a value is stored exactly at the focus
    fn is_val(&self) -> bool {
        self.get_val().is_some()
    }
    /// Number of child branches below the focus; 0 for an invalid ref
    fn child_count(&self) -> usize {
        match self.is_valid() {
            true => self.focus_node.unwrap().as_tagged().count_branches(self.node_key()),
            false => 0,
        }
    }
    /// Byte mask of the child branches below the focus; empty for an invalid ref
    fn child_mask(&self) -> ByteMask {
        if !self.is_valid() {
            return ByteMask::EMPTY;
        }
        self.focus_node.unwrap().as_tagged().node_branches_mask(self.node_key())
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperValues<V> for TrieRefBorrowed<'_, V, A> {
    /// Value at the focus, if any; delegates to `ZipperReadOnlyValues::get_val`
    fn val(&self) -> Option<&V> {
        self.get_val()
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperForking<V> for TrieRefBorrowed<'_, V, A> {
    type ReadZipperT<'a> = ReadZipperUntracked<'a, 'a, V, A> where Self: 'a;
    /// Builds a read zipper rooted at this ref's focus.
    ///
    /// NOTE(review): `focus_node.unwrap()` panics on an invalid ref —
    /// presumably callers never fork an invalid ref; confirm.
    fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> {
        let core_z = read_zipper_core::ReadZipperCore::new_with_node_and_path_internal_in(OwnedOrBorrowed::Borrowed(self.focus_node.unwrap()), self.node_key(), 0, self.val(), self.alloc.clone());
        Self::ReadZipperT::new_forked_with_inner_zipper(core_z)
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperSubtries<V, A> for TrieRefBorrowed<'_, V, A> {
    /// Turns the subtrie below the focus into a standalone [`PathMap`], or
    /// `None` when there is nothing there (no node and no root value)
    fn make_map(&self) -> Option<PathMap<Self::V, A>> {
        // The value at the focus itself is only carried over when the
        // `graft_root_vals` feature is enabled; otherwise it is deliberately dropped
        #[cfg(not(feature = "graft_root_vals"))]
        let root_val = None;
        #[cfg(feature = "graft_root_vals")]
        let root_val = self.val().cloned();
        let root_node = self.get_focus().into_option();
        if root_node.is_some() || root_val.is_some() {
            Some(PathMap::new_with_root_in(root_node, root_val, self.alloc.clone()))
        } else {
            None
        }
    }
}
impl<V: Clone + Send + Sync, A: Allocator> zipper_priv::ZipperPriv for TrieRefBorrowed<'_, V, A> {
    type V = V;
    type A = A;
    /// Returns an abstract handle to the node at the focus, resolving any
    /// residual key; `AbstractNodeRef::None` for an invalid ref
    fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> {
        if self.is_valid() {
            let node_key = self.node_key();
            if node_key.len() > 0 {
                // Reuse the `node_key` local instead of recomputing it
                self.focus_node.unwrap().as_tagged().get_node_at_key(node_key)
            } else {
                // `focus_node` is `Option<&TrieNodeODRc>`, so `unwrap` already
                // yields the borrow; no `as_ref()` / extra `&` indirection needed
                AbstractNodeRef::BorrowedRc(self.focus_node.unwrap())
            }
        } else {
            AbstractNodeRef::None
        }
    }
    /// Borrows the node at the focus, but only if the focus falls exactly on a
    /// node boundary (the whole residual key maps onto one child link)
    fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> {
        if !self.is_valid() {
            return None;
        }
        let node_key = self.node_key();
        if node_key.is_empty() {
            self.focus_node
        } else {
            self.focus_node.unwrap().as_tagged().node_get_child(node_key)
                .map(|(consumed_bytes, child_node)| {
                    debug_assert_eq!(consumed_bytes, node_key.len());
                    child_node
                })
        }
    }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlyValues<'a, V> for TrieRefBorrowed<'a, V, A> {
    /// Value at the focus, borrowed for the full `'a` lifetime of the trie
    #[inline]
    fn get_val(&self) -> Option<&'a V> {
        if self.is_valid() {
            let key = self.node_key();
            if key.len() > 0 {
                // Residual key bytes: ask the focus node for the value
                self.focus_node.unwrap().as_tagged().node_get_val(key)
            } else {
                // Key fully consumed: the union caches the value reference
                unsafe{
                    // SAFETY: an empty key on a valid ref means the union was
                    // written through `val_ref` (checked via the sentinel tag)
                    debug_assert_eq!(self.val_or_key.val_ref.0, VAL_SENTINEL);
                    self.val_or_key.val_ref.1
                }
            }
        } else {
            None
        }
    }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlySubtries<'a, V, A> for TrieRefBorrowed<'a, V, A> {
    type TrieRefT = TrieRefBorrowed<'a, V, A>;
    /// Makes a new ref focused `path` bytes below this ref's focus
    fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> TrieRefBorrowed<'a, V, A> {
        if self.is_valid() {
            let path = path.as_ref();
            let node_key = self.node_key();
            if node_key.len() > 0 {
                // With a residual key the union holds key bytes, not a value,
                // so there is no cached focus value to forward
                Self::new_with_key_and_path_in(self.focus_node.unwrap(), None, node_key, path, self.alloc.clone())
            } else {
                Self::new_with_key_and_path_in(self.focus_node.unwrap(), self.get_val(), &[], path, self.alloc.clone())
            }
        } else {
            // Invalid refs propagate invalidity
            Self::new_invalid_in(self.alloc.clone())
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperConcrete for TrieRefBorrowed<'_, V, A> {
    /// Delegates to the shared read-zipper implementation
    fn shared_node_id(&self) -> Option<u64> {
        read_zipper_core::read_zipper_shared_node_id(self)
    }
    /// A borrowed trie ref never reports its focus as shared
    fn is_shared(&self) -> bool {
        false
    }
}
impl<'a, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperReadOnlyPriv<'a, V, A> for TrieRefBorrowed<'a, V, A> {
    /// Decomposes the ref into (focus node, residual key, focus value).
    /// Panics on an invalid ref (`focus_node` is `None` there)
    fn borrow_raw_parts<'z>(&'z self) -> (TaggedNodeRef<'a, V, A>, &'z [u8], Option<&'a V>) {
        (self.focus_node.unwrap().as_tagged(), self.node_key(), self.get_val())
    }
    /// Trie refs have no owned zipper core to surrender
    fn take_core(&mut self) -> Option<read_zipper_core::ReadZipperCore<'a, 'static, V, A>> {
        None
    }
}
/// An owning counterpart to [`TrieRefBorrowed`]: holds its own refcounted
/// focus node and, when the key is fully consumed, an owned (cloned) value
#[derive(Clone)]
pub struct TrieRefOwned<V: Clone + Send + Sync, A: Allocator = GlobalAlloc> {
    /// Refcounted focus node; `None` only for invalid refs
    focus_node: Option<TrieNodeODRc<V, A>>,
    /// Tagged union: residual key bytes or an owned focus value
    val_or_key: ValOrKey<V>,
    alloc: A,
}
/// Owned analog of `ValRefOrKey`; same first-byte tagging scheme
/// (`0xFF` = value, `0xFE` = invalid, otherwise the key length)
#[repr(C)]
union ValOrKey<V> {
    node_key: (u8, [MaybeUninit<u8>; MAX_NODE_KEY_BYTES]),
    val: (u64, core::mem::ManuallyDrop<Option<V>>)
}
impl<V> Drop for ValOrKey<V> {
    fn drop(&mut self) {
        // Only the `val` variant owns anything that needs dropping
        if self.is_val() {
            unsafe{ core::mem::ManuallyDrop::drop(&mut self.val.1) }
        }
    }
}
impl<V: Clone> Clone for ValOrKey<V> {
    fn clone(&self) -> Self {
        if self.is_val() {
            // Deep-clone the owned value so both copies can drop independently
            let val_ref = unsafe{ &self.val.1 } as &Option<V>;
            ValOrKey { val: (VAL_SENTINEL, core::mem::ManuallyDrop::new(val_ref.clone())) }
        } else {
            // Key (or BAD_SENTINEL) variant is plain bytes: bitwise copy
            ValOrKey { node_key: unsafe{ self.node_key } }
        }
    }
}
impl<V> ValOrKey<V> {
    /// True when the union currently holds the `val` variant; reads the tag
    /// byte, which is `0xFF` only for `VAL_SENTINEL`
    #[inline]
    fn is_val(&self) -> bool {
        if unsafe{ self.node_key.0 } == 0xFF {
            debug_assert_eq!(VAL_SENTINEL, unsafe{ self.val.0 });
            true
        } else {
            false
        }
    }
}
impl<V: Clone + Send + Sync, A: Allocator> TrieRefOwned<V, A> {
    /// Makes a ref that answers "path doesn't exist" to every query
    pub(crate) fn new_invalid_in(alloc: A) -> Self {
        Self { focus_node: None, val_or_key: ValOrKey { val: (BAD_SENTINEL, core::mem::ManuallyDrop::new(None)) }, alloc }
    }
    /// Makes a ref focused at `path`, descending from `parent_node`.
    ///
    /// `root_val` is the value at `parent_node` itself, forwarded to
    /// `node_along_path` which decides whether it applies to the resolved focus
    pub(crate) fn new_with_node_and_path_in(parent_node: &TrieNodeODRc<V, A>, root_val: Option<&V>, path: &[u8], alloc: A) -> Self {
        let (node, key, val) = node_along_path(parent_node, path, root_val, false);
        let node_key_len = key.len();
        let val_or_key = if node_key_len > 0 && node_key_len <= MAX_NODE_KEY_BYTES {
            // Residual key bytes remain: stash them inline, tagged by length
            let mut node_key_bytes = [MaybeUninit::uninit(); MAX_NODE_KEY_BYTES];
            unsafe {
                // SAFETY: node_key_len <= MAX_NODE_KEY_BYTES, so the copy fits the buffer
                core::ptr::copy_nonoverlapping(key.as_ptr(), node_key_bytes.as_mut_ptr().cast::<u8>(), node_key_len);
            }
            ValOrKey { node_key: (node_key_len as u8, node_key_bytes) }
        } else if node_key_len == 0 {
            // Key fully consumed: clone and own the focus value
            ValOrKey { val: (VAL_SENTINEL, core::mem::ManuallyDrop::new(val.cloned())) }
        } else {
            // Residual key too long to store inline: mark the ref invalid
            ValOrKey { val: (BAD_SENTINEL, core::mem::ManuallyDrop::new(None)) }
        };
        Self { focus_node: Some(node.clone()), val_or_key, alloc }
    }
    /// Concatenates `node_key` with as much of `path` as fits into an inline
    /// buffer, attempts to descend one child link, then resolves the rest via
    /// `new_with_node_and_path_in` from wherever the descent landed
    pub(crate) fn new_with_key_and_path_in<'a, 'paths>(mut node: &TrieNodeODRc<V, A>, root_val: Option<&'a V>, node_key: &'paths [u8], mut path: &'paths [u8], alloc: A) -> Self {
        let mut temp_key_buf: [MaybeUninit<u8>; MAX_NODE_KEY_BYTES] = [MaybeUninit::uninit(); MAX_NODE_KEY_BYTES];
        let node_key_len = node_key.len();
        let path_len = path.len();
        if node_key_len > 0 && path_len > 0 {
            let next_node_path = unsafe {
                // SAFETY: assumes node_key_len <= MAX_NODE_KEY_BYTES (true for keys
                // produced by `node_key()` — TODO confirm for all callers);
                // `remaining_len` is clamped so the combined copy stays in-bounds
                core::ptr::copy_nonoverlapping(node_key.as_ptr(), temp_key_buf.as_mut_ptr().cast::<u8>(), node_key_len);
                let remaining_len = (MAX_NODE_KEY_BYTES - node_key_len).min(path_len);
                core::ptr::copy_nonoverlapping(path.as_ptr(), temp_key_buf.as_mut_ptr().cast::<u8>().add(node_key_len), remaining_len);
                core::slice::from_raw_parts(temp_key_buf.as_ptr().cast::<u8>(), node_key_len + remaining_len)
            };
            if let Some((consumed_byte_cnt, next_node)) = node.as_tagged().node_get_child(next_node_path) {
                // Descended into a child; drop the bytes the child link consumed
                debug_assert!(consumed_byte_cnt >= node_key_len);
                node = next_node;
                path = &path[consumed_byte_cnt - node_key_len..];
            } else {
                // No child link: walk the combined key from the current node
                path = next_node_path;
            }
        } else if path_len == 0 {
            // Nothing new to descend; re-resolve the existing residual key
            path = node_key;
        }
        let (node, key, val) = node_along_path(node, path, root_val, true);
        TrieRefOwned::new_with_node_and_path_in(node, val, key, alloc)
    }
    /// False only when the union carries the `BAD_SENTINEL` tag (first byte 0xFE)
    #[inline]
    fn is_valid(&self) -> bool {
        (unsafe{ self.val_or_key.node_key.0 } != 0xFE)
    }
    /// Residual key bytes still to be resolved below `focus_node`.
    ///
    /// Fix: guard against a tag byte greater than `MAX_NODE_KEY_BYTES` (the
    /// 0xFE invalid tag) instead of slicing with a bogus length, mirroring
    /// `TrieRefBorrowed::node_key`.  Previously an invalid ref reaching this
    /// method would build a 254-byte slice over the small inline buffer (UB)
    #[inline]
    fn node_key(&self) -> &[u8] {
        if self.val_or_key.is_val() {
            return &[];
        }
        let key_len = unsafe{ self.val_or_key.node_key.0 } as usize;
        if key_len > MAX_NODE_KEY_BYTES {
            // Sentinel byte, not a length: there is no residual key
            &[]
        } else {
            // SAFETY: the tag byte records how many leading bytes were initialized
            unsafe{ slice::from_raw_parts(self.val_or_key.node_key.1.as_ptr().cast(), key_len) }
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> Zipper for TrieRefOwned<V, A> {
    /// True when the focus sits on a path that exists in the trie
    fn path_exists(&self) -> bool {
        if !self.is_valid() {
            return false;
        }
        let key = self.node_key();
        key.is_empty() || self.focus_node.as_ref().unwrap().as_tagged().node_contains_partial_key(key)
    }
    /// True when a value is stored exactly at the focus
    fn is_val(&self) -> bool {
        self.val().is_some()
    }
    /// Number of child branches below the focus; 0 for an invalid ref
    fn child_count(&self) -> usize {
        match self.is_valid() {
            true => self.focus_node.as_ref().unwrap().as_tagged().count_branches(self.node_key()),
            false => 0,
        }
    }
    /// Byte mask of the child branches below the focus; empty for an invalid ref
    fn child_mask(&self) -> ByteMask {
        if !self.is_valid() {
            return ByteMask::EMPTY;
        }
        self.focus_node.as_ref().unwrap().as_tagged().node_branches_mask(self.node_key())
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperValues<V> for TrieRefOwned<V, A> {
    /// Value at the focus, if any
    fn val(&self) -> Option<&V> {
        if self.is_valid() {
            let key = self.node_key();
            if key.len() > 0 {
                // Residual key bytes: ask the focus node for the value
                self.focus_node.as_ref().unwrap().as_tagged().node_get_val(key)
            } else {
                // Key fully consumed: the union owns the (cloned) value
                unsafe{
                    // SAFETY: an empty key on a valid ref means the `val`
                    // variant is active (checked via the sentinel tag)
                    debug_assert_eq!(self.val_or_key.val.0, VAL_SENTINEL);
                    let option_ref: &Option<V> = &self.val_or_key.val.1;
                    option_ref.as_ref()
                }
            }
        } else {
            None
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperForking<V> for TrieRefOwned<V, A> {
    type ReadZipperT<'a> = ReadZipperUntracked<'a, 'a, V, A> where Self: 'a;
    /// Builds a read zipper rooted at this ref's focus.
    ///
    /// NOTE(review): `focus_node.as_ref().unwrap()` panics on an invalid ref —
    /// presumably callers never fork an invalid ref; confirm.
    fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> {
        let core_z = read_zipper_core::ReadZipperCore::new_with_node_and_path_internal_in(OwnedOrBorrowed::Borrowed(self.focus_node.as_ref().unwrap()), self.node_key(), 0, self.val(), self.alloc.clone());
        Self::ReadZipperT::new_forked_with_inner_zipper(core_z)
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperSubtries<V, A> for TrieRefOwned<V, A> {
    /// Turns the subtrie below the focus into a standalone [`PathMap`], or
    /// `None` when there is nothing there (no node and no root value)
    fn make_map(&self) -> Option<PathMap<Self::V, A>> {
        // The value at the focus itself is only carried over when the
        // `graft_root_vals` feature is enabled; otherwise it is deliberately dropped
        #[cfg(not(feature = "graft_root_vals"))]
        let root_val = None;
        #[cfg(feature = "graft_root_vals")]
        let root_val = self.val().cloned();
        let root_node = self.get_focus().into_option();
        if root_node.is_some() || root_val.is_some() {
            Some(PathMap::new_with_root_in(root_node, root_val, self.alloc.clone()))
        } else {
            None
        }
    }
}
impl<V: Clone + Send + Sync, A: Allocator> zipper_priv::ZipperPriv for TrieRefOwned<V, A> {
    type V = V;
    type A = A;
    /// Returns an abstract handle to the node at the focus, resolving any
    /// residual key; `AbstractNodeRef::None` for an invalid ref
    fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> {
        if self.is_valid() {
            let node_key = self.node_key();
            if node_key.len() > 0 {
                // Reuse the `node_key` local instead of recomputing it
                self.focus_node.as_ref().unwrap().as_tagged().get_node_at_key(node_key)
            } else {
                // `as_ref().unwrap()` already yields `&TrieNodeODRc`; the
                // extra `&` in the original was a needless double-borrow
                AbstractNodeRef::BorrowedRc(self.focus_node.as_ref().unwrap())
            }
        } else {
            AbstractNodeRef::None
        }
    }
    /// Borrows the node at the focus, but only if the focus falls exactly on a
    /// node boundary (the whole residual key maps onto one child link)
    fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> {
        if !self.is_valid() {
            return None;
        }
        let node_key = self.node_key();
        if node_key.is_empty() {
            self.focus_node.as_ref()
        } else {
            self.focus_node.as_ref().unwrap().as_tagged().node_get_child(node_key)
                .map(|(consumed_bytes, child_node)| {
                    debug_assert_eq!(consumed_bytes, node_key.len());
                    child_node
                })
        }
    }
}
/// Witness carrying an owned clone of a [`TrieRefOwned`], so values can be
/// borrowed from the witness with a lifetime independent of the source ref
pub struct TrieRefWitness<V: Clone + Send + Sync, A: Allocator>(TrieRefOwned<V, A>);
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlyConditionalValues<'a, V> for TrieRefOwned<V, A> {
    type WitnessT = TrieRefWitness<V, A>;
    /// The witness is simply a clone of this ref
    fn witness<'w>(&self) -> Self::WitnessT {
        TrieRefWitness(self.clone())
    }
    /// Borrows the focus value out of `witness`, which must have been created
    /// from this ref (checked in debug builds only)
    #[inline]
    fn get_val_with_witness<'w>(&self, witness: &'w TrieRefWitness<V, A>) -> Option<&'w V> where 'a: 'w {
        if self.is_valid() {
            debug_assert_eq!(witness.0.focus_node, self.focus_node);
            witness.0.val()
        } else {
            None
        }
    }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlySubtries<'a, V, A> for TrieRefOwned<V, A> {
    type TrieRefT = TrieRefOwned<V, A>;
    /// Makes a new owned ref focused `path` bytes below this ref's focus.
    ///
    /// NOTE(review): unlike the borrowed version, `self.val()` is forwarded even
    /// when a residual key is present — presumably harmless because
    /// `node_along_path` only consults the root value at the root; confirm.
    fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> TrieRefOwned<V, A> {
        if self.is_valid() {
            let path = path.as_ref();
            let node_key = self.node_key();
            Self::new_with_key_and_path_in(self.focus_node.as_ref().unwrap(), self.val(), node_key, path, self.alloc.clone())
        } else {
            // Invalid refs propagate invalidity
            Self::new_invalid_in(self.alloc.clone())
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperConcrete for TrieRefOwned<V, A> {
    /// Delegates to the shared read-zipper implementation
    fn shared_node_id(&self) -> Option<u64> {
        read_zipper_core::read_zipper_shared_node_id(self)
    }
    /// An owned trie ref never reports its focus as shared
    fn is_shared(&self) -> bool {
        false
    }
}
impl<'a, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperReadOnlyPriv<'a, V, A> for TrieRefOwned<V, A> {
    /// Decomposes the ref into (focus node, residual key, focus value), all
    /// borrowed from `self`.  Panics on an invalid ref (`focus_node` is `None`)
    fn borrow_raw_parts<'z>(&'z self) -> (TaggedNodeRef<'z, V, A>, &'z [u8], Option<&'z V>) {
        let node = self.focus_node.as_ref().unwrap().as_tagged();
        (node, self.node_key(), self.val())
    }
    /// Trie refs have no owned zipper core to surrender
    fn take_core(&mut self) -> Option<read_zipper_core::ReadZipperCore<'a, 'static, V, A>> {
        None
    }
}
/// Either flavor of trie ref behind one type, so APIs can hand out borrowed
/// refs when possible and fall back to owned refs when required
#[derive(Clone)]
pub enum TrieRef<'a, V: Clone + Send + Sync, A: Allocator = GlobalAlloc> {
    Borrowed(TrieRefBorrowed<'a, V, A>),
    Owned(TrieRefOwned<V, A>)
}
impl<'a, V: Clone + Send + Sync, A: Allocator> From<TrieRefBorrowed<'a, V, A>> for TrieRef<'a, V, A> {
    fn from(src: TrieRefBorrowed<'a, V, A>) -> Self {
        TrieRef::Borrowed(src)
    }
}
impl<V: Clone + Send + Sync, A: Allocator> From<TrieRefOwned<V, A>> for TrieRef<'_, V, A> {
    fn from(src: TrieRefOwned<V, A>) -> Self {
        TrieRef::Owned(src)
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> Zipper for TrieRef<'_, V, A> {
    /// Dispatches to whichever variant backs this ref
    fn path_exists(&self) -> bool {
        match self {
            TrieRef::Borrowed(r) => r.path_exists(),
            TrieRef::Owned(r) => r.path_exists(),
        }
    }
    /// Dispatches to whichever variant backs this ref
    fn is_val(&self) -> bool {
        match self {
            TrieRef::Borrowed(r) => r.is_val(),
            TrieRef::Owned(r) => r.is_val(),
        }
    }
    /// Dispatches to whichever variant backs this ref
    fn child_count(&self) -> usize {
        match self {
            TrieRef::Borrowed(r) => r.child_count(),
            TrieRef::Owned(r) => r.child_count(),
        }
    }
    /// Dispatches to whichever variant backs this ref
    fn child_mask(&self) -> ByteMask {
        match self {
            TrieRef::Borrowed(r) => r.child_mask(),
            TrieRef::Owned(r) => r.child_mask(),
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperValues<V> for TrieRef<'_, V, A> {
    /// Dispatches to whichever variant backs this ref
    fn val(&self) -> Option<&V> {
        match self {
            TrieRef::Borrowed(r) => r.val(),
            TrieRef::Owned(r) => r.val(),
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperForking<V> for TrieRef<'_, V, A> {
    type ReadZipperT<'a> = ReadZipperUntracked<'a, 'a, V, A> where Self: 'a;
    /// Dispatches to whichever variant backs this ref
    fn fork_read_zipper<'a>(&'a self) -> Self::ReadZipperT<'a> {
        match self {
            TrieRef::Borrowed(r) => r.fork_read_zipper(),
            TrieRef::Owned(r) => r.fork_read_zipper(),
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperSubtries<V, A> for TrieRef<'_, V, A> {
    /// Dispatches to whichever variant backs this ref
    fn make_map(&self) -> Option<PathMap<Self::V, A>> {
        match self {
            TrieRef::Borrowed(r) => r.make_map(),
            TrieRef::Owned(r) => r.make_map(),
        }
    }
}
impl<V: Clone + Send + Sync, A: Allocator> zipper_priv::ZipperPriv for TrieRef<'_, V, A> {
    type V = V;
    type A = A;
    /// Dispatches to whichever variant backs this ref
    fn get_focus(&self) -> AbstractNodeRef<'_, Self::V, Self::A> {
        match self {
            TrieRef::Borrowed(r) => r.get_focus(),
            TrieRef::Owned(r) => r.get_focus(),
        }
    }
    /// Dispatches to whichever variant backs this ref
    fn try_borrow_focus(&self) -> Option<&TrieNodeODRc<Self::V, Self::A>> {
        match self {
            TrieRef::Borrowed(r) => r.try_borrow_focus(),
            TrieRef::Owned(r) => r.try_borrow_focus(),
        }
    }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlyConditionalValues<'a, V> for TrieRef<'a, V, A> {
    type WitnessT = TrieRefWitness<V, A>;
    /// An owned ref's witness is a clone; a borrowed ref needs no witness at
    /// all (its values already live for `'a`), so it gets an invalid placeholder
    fn witness<'w>(&self) -> Self::WitnessT {
        match self {
            TrieRef::Borrowed(trie_ref) => TrieRefWitness(TrieRefOwned::new_invalid_in(trie_ref.alloc.clone())),
            TrieRef::Owned(trie_ref) => trie_ref.witness(),
        }
    }
    /// Borrowed refs hand out `'a` values directly and ignore the witness;
    /// owned refs must borrow through the witness
    #[inline]
    fn get_val_with_witness<'w>(&self, witness: &'w TrieRefWitness<V, A>) -> Option<&'w V> where 'a: 'w {
        match self {
            TrieRef::Borrowed(trie_ref) => trie_ref.get_val(),
            TrieRef::Owned(trie_ref) => trie_ref.get_val_with_witness(witness),
        }
    }
}
impl<'a, V: Clone + Send + Sync + Unpin + 'a, A: Allocator + 'a> ZipperReadOnlySubtries<'a, V, A> for TrieRef<'a, V, A> {
    type TrieRefT = TrieRef<'a, V, A>;
    /// Descends in the active variant and re-wraps the result in the same variant
    fn trie_ref_at_path<K: AsRef<[u8]>>(&self, path: K) -> TrieRef<'a, V, A> {
        match self {
            TrieRef::Borrowed(r) => TrieRef::Borrowed(r.trie_ref_at_path(path)),
            TrieRef::Owned(r) => TrieRef::Owned(r.trie_ref_at_path(path)),
        }
    }
}
impl<V: Clone + Send + Sync + Unpin, A: Allocator> ZipperConcrete for TrieRef<'_, V, A> {
    /// Dispatches to whichever variant backs this ref
    fn shared_node_id(&self) -> Option<u64> {
        match self {
            TrieRef::Borrowed(r) => r.shared_node_id(),
            TrieRef::Owned(r) => r.shared_node_id(),
        }
    }
    /// Dispatches to whichever variant backs this ref
    fn is_shared(&self) -> bool {
        match self {
            TrieRef::Borrowed(r) => r.is_shared(),
            TrieRef::Owned(r) => r.is_shared(),
        }
    }
}
impl<'a, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperReadOnlyPriv<'a, V, A> for TrieRef<'a, V, A> {
    /// Dispatches to whichever variant backs this ref.
    ///
    /// NOTE(review): both arms return `'z`-scoped values here even though the
    /// borrowed variant could offer `'a` — the common `'z` signature is the
    /// narrower of the two
    fn borrow_raw_parts<'z>(&'z self) -> (TaggedNodeRef<'z, V, A>, &'z [u8], Option<&'z V>) {
        match self {
            TrieRef::Borrowed(trie_ref) => trie_ref.borrow_raw_parts(),
            TrieRef::Owned(trie_ref) => trie_ref.borrow_raw_parts(),
        }
    }
    /// Trie refs have no owned zipper core to surrender
    fn take_core(&mut self) -> Option<read_zipper_core::ReadZipperCore<'a, 'static, V, A>> {
        None
    }
}
/// Generates a `Debug` impl for a trie-ref type, printing only the child mask
/// and shared-node id (the refs' tagged-union internals are not meaningful to show)
macro_rules! impl_trie_ref_debug {
    (impl $($impl_tail:tt)*) => {
        impl $($impl_tail)* {
            fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
                f.debug_struct(core::any::type_name::<Self>())
                    .field("child_mask", &self.child_mask())
                    .field("shared_node_id", &self.shared_node_id())
                    .finish()
            }
        }
    };
}
impl_trie_ref_debug!(
    impl<V: Clone + Send + Sync + Unpin, A: Allocator> core::fmt::Debug for TrieRefBorrowed<'_, V, A>
);
impl_trie_ref_debug!(
    impl<V: Clone + Send + Sync + Unpin, A: Allocator> core::fmt::Debug for TrieRefOwned<V, A>
);
impl_trie_ref_debug!(
    impl<V: Clone + Send + Sync + Unpin, A: Allocator> core::fmt::Debug for TrieRef<'_, V, A>
);
#[cfg(test)]
mod tests {
    use crate::{trie_node::AbstractNodeRef, utils::ByteMask, zipper::{zipper_priv::ZipperPriv, *}, PathMap};
    /// Basic focus/value/existence queries, plus chained `trie_ref_at_path`
    /// descents including descents past the end of the trie
    #[test]
    fn trie_ref_test1() {
        let keys = ["Hello", "Hell", "Help", "Helsinki"];
        let map: PathMap<()> = keys.iter().map(|k| (k, ())).collect();
        let trie_ref = map.trie_ref_at_path(b"He");
        // Node-key layout depends on the node type, so only checked when
        // dense nodes aren't forced
        #[cfg(not(feature = "all_dense_nodes"))]
        assert_eq!(trie_ref.node_key(), b"He");
        assert_eq!(trie_ref.val(), None);
        assert_eq!(trie_ref.path_exists(), true);
        let trie_ref = map.trie_ref_at_path(b"Hel");
        // "Hel" lands exactly on a node boundary, so the focus is a borrowed Rc
        let node = trie_ref.get_focus();
        assert!(matches!(node, AbstractNodeRef::BorrowedRc(_)));
        assert_eq!(trie_ref.val(), None);
        assert_eq!(trie_ref.path_exists(), true);
        let trie_ref = map.trie_ref_at_path(b"Help");
        assert_eq!(trie_ref.val(), Some(&()));
        assert_eq!(trie_ref.path_exists(), true);
        let trie_ref = map.trie_ref_at_path(b"Hell");
        assert_eq!(trie_ref.val(), Some(&()));
        assert_eq!(trie_ref.path_exists(), true);
        let trie_ref = map.trie_ref_at_path(b"Hi");
        assert_eq!(trie_ref.val(), None);
        assert_eq!(trie_ref.path_exists(), false);
        // A path far longer than any key must yield a non-existent focus
        let trie_ref = map.trie_ref_at_path(b"Hello Mr. Washington, my name is John, but sometimes people call me Jack. I live in Springfield.");
        assert_eq!(trie_ref.val(), None);
        assert_eq!(trie_ref.path_exists(), false);
        let trie_ref0 = map.trie_ref_at_path(b"H");
        assert_eq!(trie_ref0.val(), None);
        assert_eq!(trie_ref0.path_exists(), true);
        assert_eq!(trie_ref0.child_count(), 1);
        assert_eq!(trie_ref0.child_mask(), ByteMask::from(b'e'));
        // Descend ref-from-ref rather than from the map root
        let trie_ref1 = trie_ref0.trie_ref_at_path(b"el");
        assert_eq!(trie_ref1.val(), None);
        assert_eq!(trie_ref1.path_exists(), true);
        assert_eq!(trie_ref1.child_count(), 3);
        let trie_ref2 = trie_ref1.trie_ref_at_path(b"lo");
        assert_eq!(trie_ref2.val(), Some(&()));
        assert_eq!(trie_ref2.path_exists(), true);
        assert_eq!(trie_ref2.child_count(), 0);
        // Descending from a non-existent focus stays non-existent
        let trie_ref3 = trie_ref2.trie_ref_at_path(b"Operator");
        assert_eq!(trie_ref3.val(), None);
        assert_eq!(trie_ref3.path_exists(), false);
        assert_eq!(trie_ref3.child_count(), 0);
        let trie_ref4 = trie_ref3.trie_ref_at_path(b", give me number 9");
        assert_eq!(trie_ref4.val(), None);
        assert_eq!(trie_ref4.path_exists(), false);
        assert_eq!(trie_ref4.child_count(), 0);
        let trie_ref2 = trie_ref1.trie_ref_at_path(b"l");
        assert_eq!(trie_ref2.val(), Some(&()));
        assert_eq!(trie_ref2.path_exists(), true);
        assert_eq!(trie_ref2.child_count(), 1);
    }
    /// Forking a read zipper from a ref and materializing a ref into a new map
    #[test]
    fn trie_ref_test2() {
        let rs = ["arrow", "bow", "cannon", "roman", "romane", "romanus^", "romulus", "rubens", "ruber", "rubicon", "rubicundus", "rom'i"];
        let btm: PathMap<usize> = rs.into_iter().enumerate().map(|(i, r)| (r.as_bytes(), i) ).collect();
        let trie_ref = btm.trie_ref_at_path([]);
        assert_eq!(trie_ref.val(), None);
        assert_eq!(trie_ref.path_exists(), true);
        assert_eq!(trie_ref.child_count(), 4);
        let trie_ref = trie_ref.trie_ref_at_path([b'a']);
        assert_eq!(trie_ref.val(), None);
        assert_eq!(trie_ref.path_exists(), true);
        assert_eq!(trie_ref.child_count(), 1);
        let trie_ref = btm.trie_ref_at_path(b"r");
        // The forked zipper's origin is the ref's focus, not the map root
        let mut z = trie_ref.fork_read_zipper();
        assert_eq!(z.val_count(), 9);
        z.descend_to(b"om");
        assert_eq!(z.val_count(), 5);
        assert_eq!(z.path(), b"om");
        assert_eq!(z.origin_path(), b"om");
        let new_map = trie_ref.make_map().unwrap();
        assert_eq!(new_map.val_count(), 9);
    }
    /// A ref taken through a zipper head keeps seeing the value it captured,
    /// even after later writers replace or remove that value in the map
    #[test]
    fn trie_ref_test3() {
        let mut map = PathMap::<usize>::new();
        map.set_val_at(b"path", 42);
        let zh = map.zipper_head();
        let rz = zh.read_zipper_at_borrowed_path(b"path").unwrap();
        let tr = rz.trie_ref_at_path(b"");
        assert_eq!(tr.val(), Some(&42));
        // The ref outlives the zipper it was taken from
        drop(rz);
        assert_eq!(tr.val(), Some(&42));
        let mut wz = zh.write_zipper_at_exclusive_path(b"path").unwrap();
        wz.set_val(24);
        assert_eq!(wz.val(), Some(&24));
        // The ref still observes the old value after the write
        assert_eq!(tr.val(), Some(&42));
        drop(wz);
        let rz = zh.read_zipper_at_path(b"path").unwrap();
        assert_eq!(rz.val(), Some(&24));
        drop(rz);
        let mut rz = zh.read_zipper_at_path(b"").unwrap();
        rz.descend_to(b"path");
        assert_eq!(rz.path_exists(), true);
        assert_eq!(rz.val(), Some(&24));
        drop(rz);
        assert_eq!(tr.val(), Some(&42));
        let mut wz = zh.write_zipper_at_exclusive_path(b"path").unwrap();
        wz.remove_val(true);
        drop(wz);
        drop(zh);
        // Removal is visible through the map, but the ref still holds 42
        assert_eq!(map.get_val_at(b"path"), None);
        assert_eq!(tr.val(), Some(&42));
    }
}