1use crate::collections::btree_map::internal_node::InternalBTreeNode;
2use crate::collections::btree_map::iter::SBTreeMapIter;
3use crate::collections::btree_map::leaf_node::LeafBTreeNode;
4use crate::collections::btree_map::{BTreeNode, LeveledList, SBTreeMap};
5use crate::encoding::AsFixedSizeBytes;
6use crate::primitive::s_ref::SRef;
7use crate::primitive::s_ref_mut::SRefMut;
8use crate::primitive::StableType;
9use crate::utils::certification::{
10 empty_hash, labeled, labeled_hash, pruned, AsHashTree, AsHashableBytes, Hash, HashForker,
11 HashTree, WitnessForker,
12};
13use std::borrow::Borrow;
14use std::fmt::{Debug, Formatter};
15use std::ops::Deref;
16
/// A certified B-tree map: an [`SBTreeMap`] augmented with Merkle root
/// hashes per node, able to produce witnesses (inclusion proofs, absence
/// proofs and range proofs) for its entries.
pub struct SCertifiedBTreeMap<
    K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
    V: StableType + AsFixedSizeBytes + AsHashTree,
> {
    // The underlying stable B-tree map holding the actual entries.
    pub(crate) inner: SBTreeMap<K, V>,
    // Nodes touched since the last commit, grouped by tree level, so that
    // `commit()` can re-hash them bottom-up.
    modified: LeveledList,
    // True while there are uncommitted modifications; witness/proof methods
    // assert this flag is false before running.
    uncommited: bool,
}
194
impl<
        K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
        V: StableType + AsFixedSizeBytes + AsHashTree,
    > SCertifiedBTreeMap<K, V>
{
    /// Creates a new, empty certified map.
    #[inline]
    pub fn new() -> Self {
        Self {
            inner: SBTreeMap::new_certified(),
            modified: LeveledList::new(),
            uncommited: false,
        }
    }

    /// Inserts `key -> value` without recomputing root hashes.
    ///
    /// Touched nodes are recorded in `self.modified`; call [`Self::commit`]
    /// to make the change visible to witnesses and `root_hash`.
    ///
    /// Returns the previously stored value on success; on failure
    /// (e.g. out of stable memory) gives the `(key, value)` pair back.
    #[inline]
    pub fn insert(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
        let res = self.inner._insert(key, value, &mut self.modified);

        // Only a successful insertion dirties the tree.
        if res.is_ok() && !self.uncommited {
            self.uncommited = true;
        }

        res
    }

    /// [`Self::insert`] immediately followed by [`Self::commit`].
    #[inline]
    pub fn insert_and_commit(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
        let it = self.insert(key, value)?;
        self.commit();

        Ok(it)
    }

    /// Removes `key` without recomputing root hashes; returns the removed
    /// value if the key was present. Call [`Self::commit`] afterwards.
    #[inline]
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        // NOTE(review): unlike `insert`, the tree is marked dirty even when
        // the key turns out to be absent — presumably because `_remove` may
        // rebalance nodes on the way down regardless of the outcome; confirm
        // before tightening this.
        if !self.uncommited {
            self.uncommited = true;
        }

        self.inner._remove(key, &mut self.modified)
    }

    /// [`Self::remove`] immediately followed by [`Self::commit`].
    #[inline]
    pub fn remove_and_commit<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let it = self.remove(key);
        self.commit();

        it
    }

    /// Removes all entries and discards any pending (uncommitted) changes.
    #[inline]
    pub fn clear(&mut self) {
        self.uncommited = false;
        self.modified = LeveledList::new();

        self.inner.clear();
    }

    /// Returns an immutable reference to the value stored under `key`.
    #[inline]
    pub fn get<Q>(&self, key: &Q) -> Option<SRef<'_, V>>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.inner.get(key)
    }

    /// Returns some key of the map derived from `seed`, or `None` if the
    /// map is empty (delegates to the inner map).
    #[inline]
    pub fn get_random_key(&self, seed: u32) -> Option<SRef<K>> {
        self.inner.get_random_key(seed)
    }

    /// Runs `f` with mutable access to the value under `key` (or `None` if
    /// absent) and commits automatically afterwards.
    #[inline]
    pub fn with_key<Q, R, F: FnOnce(Option<SRefMut<V>>) -> R>(&mut self, key: &Q, f: F) -> R
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let val = self.inner._get_mut(key, &mut self.modified);

        // Handing out mutable access may change the value's hash, so the
        // tree is considered dirty until the closing `commit()` below.
        if val.is_some() && !self.uncommited {
            self.uncommited = true;
        }

        let res = f(val);

        self.commit();

        res
    }

    /// Returns `true` if `key` is present.
    #[inline]
    pub fn contains_key<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.inner.contains_key(key)
    }

    /// Returns the number of entries.
    #[inline]
    pub fn len(&self) -> u64 {
        self.inner.len()
    }

    /// Returns `true` if the map contains no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    /// Iterates over the entries in ascending key order.
    #[inline]
    pub fn iter(&self) -> SBTreeMapIter<'_, K, V> {
        self.inner.iter()
    }

    /// Recomputes root hashes of every node modified since the last commit
    /// and clears the dirty flag. Cheap no-op when nothing is pending.
    pub fn commit(&mut self) {
        if !self.uncommited {
            return;
        }
        self.uncommited = false;

        // NOTE(review): correctness relies on `LeveledList::pop` yielding
        // deeper nodes before their ancestors, so a parent always reads
        // freshly committed child hashes.
        while let Some(ptr) = self.modified.pop() {
            let mut node = BTreeNode::<K, V>::from_ptr(ptr);
            match &mut node {
                BTreeNode::Internal(n) => n.commit::<V>(),
                BTreeNode::Leaf(n) => n.commit(),
            };
        }
    }

    /// Builds a witness proving that `index` is NOT a key of this map, by
    /// exposing the neighboring key(s) it would sit between.
    ///
    /// # Panics
    /// Panics if the key is actually present, or if there are uncommitted
    /// changes.
    pub fn prove_absence<Q>(&self, index: &Q) -> HashTree
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        assert!(!self.uncommited);

        let root_opt = self.inner.get_root();
        if root_opt.is_none() {
            return HashTree::Empty;
        }

        // SAFETY: checked for `None` right above.
        let node = unsafe { root_opt.unwrap_unchecked() };
        match node {
            BTreeNode::Internal(n) => match n.prove_absence::<V, Q>(index) {
                Ok(w) => w,
                Err(w) => w,
            },
            BTreeNode::Leaf(n) => {
                let len = n.read_len();
                let idx = match n.binary_search(index, len) {
                    Ok(_) => panic!("The key is present!"),
                    Err(idx) => idx,
                };

                match n.prove_absence(idx, len) {
                    Ok(w) => w,
                    Err(w) => w,
                }
            }
        }
    }

    /// Builds a witness exposing every key in `from..=to` (values pruned).
    ///
    /// # Panics
    /// Panics if `from > to` or if there are uncommitted changes.
    pub fn prove_range<Q>(&self, from: &Q, to: &Q) -> HashTree
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        assert!(!self.uncommited);
        assert!(from.le(to));

        let root_opt = self.inner.get_root();
        if root_opt.is_none() {
            return HashTree::Empty;
        }

        // SAFETY: checked for `None` right above.
        let node = unsafe { root_opt.unwrap_unchecked() };
        match node {
            BTreeNode::Internal(n) => n.prove_range::<V, Q>(from, to),
            BTreeNode::Leaf(n) => n.prove_range(from, to),
        }
    }

    /// Builds an inclusion witness for `index`, using `f` to produce the
    /// subtree standing in for the value (e.g. a leaf of its bytes, or a
    /// nested witness for composite values).
    ///
    /// # Panics
    /// Panics if the key is absent, or if there are uncommitted changes.
    pub fn witness_with<Q, Fn: FnMut(&V) -> HashTree>(&self, index: &Q, f: Fn) -> HashTree
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        assert!(!self.uncommited);

        let root_opt = self.inner.get_root();
        if root_opt.is_none() {
            return HashTree::Empty;
        }

        // SAFETY: checked for `None` right above.
        let node = unsafe { root_opt.unwrap_unchecked() };
        witness_node(&node, index, f)
    }

    /// [`Self::witness_with`] using the value's own `hash_tree()` as the
    /// value subtree.
    #[inline]
    pub fn witness<Q>(&self, index: &Q) -> HashTree
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.witness_with(index, |value| value.hash_tree())
    }
}
488
489impl<
490 K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
491 V: StableType + AsFixedSizeBytes + AsHashTree,
492 > AsHashTree for SCertifiedBTreeMap<K, V>
493{
494 #[inline]
495 fn root_hash(&self) -> Hash {
496 self.inner
497 .get_root()
498 .map(|it| match it {
499 BTreeNode::Internal(n) => n.root_hash(),
500 BTreeNode::Leaf(n) => n.root_hash(),
501 })
502 .unwrap_or_else(empty_hash)
503 }
504
505 fn hash_tree(&self) -> HashTree {
515 assert!(!self.uncommited);
516
517 let e1 = self.inner.iter().next();
518 let e2 = self.inner.iter().rev().next();
519
520 match (e1, e2) {
521 (None, None) => HashTree::Empty,
522 (Some((k1, _)), Some((k2, _))) => self.prove_range(k1.deref(), k2.deref()),
523 _ => unreachable!(),
524 }
525 }
526}
527
/// Recursively builds an inclusion witness for key `k`, descending from
/// `node` down to the leaf that holds it.
///
/// At every internal node all off-path children are pruned, and the child on
/// the search path is replaced by its own (recursively built) witness.
fn witness_node<
    Q,
    K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
    V: StableType + AsFixedSizeBytes + AsHashTree,
    Fn: FnMut(&V) -> HashTree,
>(
    node: &BTreeNode<K, V>,
    k: &Q,
    f: Fn,
) -> HashTree
where
    K: Borrow<Q>,
    Q: Ord + ?Sized,
{
    match node {
        BTreeNode::Internal(n) => {
            let len = n.read_len();
            // On an exact separator match, descend into the right child
            // (`idx + 1`); otherwise `Err(idx)` already names the child slot.
            let idx = match n.binary_search(k, len) {
                Ok(idx) => idx + 1,
                Err(idx) => idx,
            };

            let child =
                BTreeNode::<K, V>::from_ptr(u64::from_fixed_size_bytes(&n.read_child_ptr_buf(idx)));

            n.witness_with_replacement::<V>(idx, witness_node(&child, k, f), len)
        }
        BTreeNode::Leaf(n) => n.witness_with(k, f),
    }
}
558
impl<
        K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes + Debug,
        V: StableType + AsFixedSizeBytes + AsHashTree + Debug,
    > SCertifiedBTreeMap<K, V>
{
    /// Debugging aid: dumps the inner tree structure followed by the list of
    /// not-yet-committed nodes.
    #[inline]
    pub fn debug_print(&self) {
        self.inner.debug_print();
        self.modified.debug_print();
    }
}
570
impl<
        K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
        V: StableType + AsFixedSizeBytes + AsHashTree,
    > Default for SCertifiedBTreeMap<K, V>
{
    /// Equivalent to [`SCertifiedBTreeMap::new`].
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
581
582impl<
583 K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
584 V: StableType + AsFixedSizeBytes + AsHashTree,
585 > AsFixedSizeBytes for SCertifiedBTreeMap<K, V>
586{
587 const SIZE: usize = SBTreeMap::<K, V>::SIZE;
588 type Buf = <SBTreeMap<K, V> as AsFixedSizeBytes>::Buf;
589
590 fn as_fixed_size_bytes(&self, buf: &mut [u8]) {
591 assert!(!self.uncommited);
592
593 self.inner.as_fixed_size_bytes(buf)
594 }
595
596 fn from_fixed_size_bytes(buf: &[u8]) -> Self {
597 let mut inner = SBTreeMap::<K, V>::from_fixed_size_bytes(buf);
598 inner.set_certified(true);
599
600 Self {
601 inner,
602 modified: LeveledList::new(),
603 uncommited: false,
604 }
605 }
606}
607
impl<
        K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
        V: StableType + AsFixedSizeBytes + AsHashTree,
    > StableType for SCertifiedBTreeMap<K, V>
{
    // Both flags are simply forwarded to the inner map, which owns all
    // stable-memory resources of this structure.
    #[inline]
    unsafe fn stable_drop_flag_on(&mut self) {
        self.inner.stable_drop_flag_on();
    }

    #[inline]
    unsafe fn stable_drop_flag_off(&mut self) {
        self.inner.stable_drop_flag_off();
    }
}
623
624impl<
625 K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes + Debug,
626 V: StableType + AsFixedSizeBytes + AsHashTree + Debug,
627 > Debug for SCertifiedBTreeMap<K, V>
628{
629 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
630 f.write_str("{")?;
631 for (idx, (k, v)) in self.iter().enumerate() {
632 k.fmt(f)?;
633 f.write_str(": ")?;
634 v.fmt(f)?;
635
636 if idx < (self.len() - 1) as usize {
637 f.write_str(", ")?;
638 }
639 }
640 f.write_str("}")
641 }
642}
643
impl<
        K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes,
        V: StableType + AsFixedSizeBytes + AsHashTree,
    > LeafBTreeNode<K, V>
{
    /// Recomputes and stores this leaf's root hash: a fork over
    /// `labeled(key, value_root_hash)` for every entry, in key order.
    pub(crate) fn commit(&mut self) {
        let len = self.read_len();

        let mut hash = HashForker::default();

        for i in 0..len {
            let k = self.get_key(i);
            let v = self.get_value(i);

            hash.fork_with(labeled_hash(&k.as_hashable_bytes(), &v.root_hash()));
        }

        self.write_root_hash(&hash.finish(), true);
    }

    /// Returns the root hash stored by the last `commit`.
    #[inline]
    pub(crate) fn root_hash(&self) -> Hash {
        self.read_root_hash(true)
    }

    /// Builds an absence witness for a key that would sit at `index`.
    ///
    /// Exposes the labels of the entries at `index - 1` and `index` (the
    /// would-be neighbors) and prunes everything else.
    ///
    /// Returns `Err` when the gap lies past this leaf's last entry
    /// (`index == len`), meaning the caller (the parent node) must complete
    /// the proof with the right sibling's leftmost entry.
    pub(crate) fn prove_absence(&self, index: usize, len: usize) -> Result<HashTree, HashTree> {
        let mut witness = WitnessForker::default();

        // When `index == 0` this is -1; the `as usize` cast below then wraps
        // to a huge value that matches no `i`, which is intended (there is no
        // left neighbor in that case).
        let from = index as isize - 1;
        let to = index;

        for i in 0..len {
            let k = self.get_key(i);
            let v = self.get_value(i);

            let rh = if i == from as usize || i == to {
                // Neighbor entries are exposed (their values stay pruned).
                labeled(k.as_hashable_bytes(), pruned(v.root_hash()))
            } else {
                pruned(labeled_hash(&k.as_hashable_bytes(), &v.root_hash()))
            };

            witness.fork_with(rh);
        }

        if to == len && len != 0 {
            Err(witness.finish())
        } else {
            Ok(witness.finish())
        }
    }

    /// Builds a witness exposing every entry between the search positions of
    /// `from` and `to` (inclusive), pruning all other entries.
    pub(crate) fn prove_range<Q>(&self, from: &Q, to: &Q) -> HashTree
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let len = self.read_len();

        if len == 0 {
            return HashTree::Empty;
        }

        // Both bounds resolve to an index whether or not the key is present.
        let from_idx = match self.binary_search(from, len) {
            Ok(idx) => idx,
            Err(idx) => idx,
        };

        let to_idx = match self.binary_search(to, len) {
            Ok(idx) => idx,
            Err(idx) => idx,
        };

        let mut witness = WitnessForker::default();

        // Entries before the range: pruned.
        for i in 0..from_idx {
            let k = self.get_key(i);
            let v = self.get_value(i);

            witness.fork_with(pruned(labeled_hash(&k.as_hashable_bytes(), &v.root_hash())));
        }

        // Entries inside the range: keys exposed, values pruned.
        for i in from_idx..(to_idx + 1).min(len) {
            let k = self.get_key(i);
            let v = self.get_value(i);

            witness.fork_with(labeled(k.as_hashable_bytes(), pruned(v.root_hash())));
        }

        // Entries after the range: pruned.
        for i in (to_idx + 1)..len {
            let k = self.get_key(i);
            let v = self.get_value(i);

            witness.fork_with(pruned(labeled_hash(&k.as_hashable_bytes(), &v.root_hash())));
        }

        witness.finish()
    }

    /// Builds an inclusion witness: the entry matching `index` gets
    /// `f(value)` as its subtree, every other entry is pruned.
    ///
    /// # Panics
    /// Panics if the key is not present in this leaf.
    pub(crate) fn witness_with<Q, Fn: FnMut(&V) -> HashTree>(
        &self,
        index: &Q,
        mut f: Fn,
    ) -> HashTree
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let len = self.read_len();

        assert!(len > 0, "The key is NOT present!");

        let index = match self.binary_search(index, len) {
            Ok(idx) => idx,
            Err(_) => panic!("The key is NOT present!"),
        };

        let mut witness = WitnessForker::default();

        for i in 0..len {
            let k = self.get_key(i);
            let v = self.get_value(i);

            let rh = if i == index {
                labeled(k.as_hashable_bytes(), f(&v))
            } else {
                pruned(labeled_hash(&k.as_hashable_bytes(), &v.root_hash()))
            };

            witness.fork_with(rh);
        }

        witness.finish()
    }
}
779
impl<K: StableType + AsFixedSizeBytes + Ord + AsHashableBytes> InternalBTreeNode<K> {
    /// Recomputes and stores this node's root hash: a fork over the root
    /// hashes of all its children.
    pub(crate) fn commit<V: StableType + AsFixedSizeBytes + AsHashTree>(&mut self) {
        let len = self.read_len();
        let mut hash = HashForker::default();

        // A node with `len` keys has `len + 1` children.
        for i in 0..(len + 1) {
            hash.fork_with(self.read_child_root_hash::<V>(i, true));
        }

        self.write_root_hash(&hash.finish(), true);
    }

    /// Returns the root hash stored by the last `commit`.
    #[inline]
    pub(crate) fn root_hash(&self) -> Hash {
        self.read_root_hash(true)
    }

    /// Builds an absence witness for `key` in this subtree.
    ///
    /// Descends into the child where the key would reside; all other
    /// children appear only as pruned root hashes. If that child reports
    /// that the gap lies at its right edge (`Err`), the proof is completed
    /// with the leftmost entry of the next sibling so that both neighbors of
    /// the gap are exposed.
    ///
    /// Returns `Err` when the gap lies past this node's own rightmost child,
    /// letting the parent perform the same completion one level up.
    ///
    /// # Panics
    /// Panics if `key` is present.
    pub(crate) fn prove_absence<V: StableType + AsFixedSizeBytes + AsHashTree, Q>(
        &self,
        key: &Q,
    ) -> Result<HashTree, HashTree>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let len = self.read_len();

        debug_assert!(len > 0);

        let index = match self.binary_search(key, len) {
            Ok(_) => panic!("The key is present!"),
            Err(idx) => idx,
        };

        let mut witness = WitnessForker::default();

        let mut i = 0;
        loop {
            // `len + 1` children in total.
            if i == len + 1 {
                break;
            }

            let mut ptr = u64::from_fixed_size_bytes(&self.read_child_ptr_buf(i));
            let mut child = BTreeNode::<K, V>::from_ptr(ptr);

            let result = if i == index {
                // The child on the search path proves the absence itself.
                match child {
                    BTreeNode::Internal(n) => n.prove_absence::<V, Q>(key),
                    BTreeNode::Leaf(n) => {
                        let len = n.read_len();
                        let idx = match n.binary_search(key, len) {
                            Ok(_) => panic!("The key is present!"),
                            Err(idx) => idx,
                        };

                        n.prove_absence(idx, len)
                    }
                }
            } else {
                // Off-path children are included only as pruned hashes.
                match child {
                    BTreeNode::Internal(n) => Ok(HashTree::Pruned(n.read_root_hash(true))),
                    BTreeNode::Leaf(n) => Ok(HashTree::Pruned(n.read_root_hash(true))),
                }
            };

            match result {
                Ok(h) => {
                    witness.fork_with(h);

                    i += 1;
                }
                Err(h) => {
                    // The gap sits at the child's right edge — its witness
                    // alone shows only the left neighbor.
                    witness.fork_with(h);

                    if i == len {
                        // No right sibling at this level either; delegate the
                        // completion to our parent.
                        return Err(witness.finish());
                    }

                    // Expose the right neighbor: the leftmost entry of the
                    // next sibling subtree.
                    ptr = u64::from_fixed_size_bytes(&self.read_child_ptr_buf(i + 1));
                    child = BTreeNode::<K, V>::from_ptr(ptr);

                    let rh = match child {
                        BTreeNode::Internal(n) => n.prove_absence::<V, Q>(key),
                        BTreeNode::Leaf(n) => {
                            let len = n.read_len();
                            n.prove_absence(0, len)
                        }
                    }
                    .unwrap();

                    witness.fork_with(rh);

                    // Both children `i` and `i + 1` have been consumed.
                    i += 2;
                }
            }
        }

        Ok(witness.finish())
    }

    /// Builds a range witness: children overlapping the search interval are
    /// expanded recursively, all others are pruned.
    pub(crate) fn prove_range<V: AsHashTree + StableType + AsFixedSizeBytes, Q>(
        &self,
        from: &Q,
        to: &Q,
    ) -> HashTree
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let len = self.read_len();

        debug_assert!(len > 0);

        let from_idx = match self.binary_search(from, len) {
            Ok(idx) => idx,
            Err(idx) => idx,
        };

        // On an exact separator match of `to`, the right child (`idx + 1`)
        // may also contain entries of the range.
        let to_idx = match self.binary_search(to, len) {
            Ok(idx) => idx + 1,
            Err(idx) => idx,
        };

        let mut witness = WitnessForker::default();

        // Children entirely before the range: pruned.
        for i in 0..from_idx {
            witness.fork_with(pruned(self.read_child_root_hash::<V>(i, true)));
        }

        // Children overlapping the range: expanded recursively.
        for i in from_idx..(to_idx + 1).min(len + 1) {
            let ptr = u64::from_fixed_size_bytes(&self.read_child_ptr_buf(i));
            let child = BTreeNode::<K, V>::from_ptr(ptr);

            let rh = match child {
                BTreeNode::Internal(n) => n.prove_range::<V, Q>(from, to),
                BTreeNode::Leaf(n) => n.prove_range(from, to),
            };

            witness.fork_with(rh);
        }

        // Children entirely after the range: pruned.
        for i in (to_idx + 1)..(len + 1) {
            witness.fork_with(pruned(self.read_child_root_hash::<V>(i, true)));
        }

        witness.finish()
    }

    /// Builds a witness where child `index` is replaced by the already-built
    /// subtree `replace` and every other child is pruned.
    pub(crate) fn witness_with_replacement<V: StableType + AsFixedSizeBytes + AsHashTree>(
        &self,
        index: usize,
        replace: HashTree,
        len: usize,
    ) -> HashTree {
        debug_assert!(len > 0);

        let mut witness = WitnessForker::default();

        for i in 0..index {
            witness.fork_with(pruned(self.read_child_root_hash::<V>(i, true)));
        }

        witness.fork_with(replace);

        for i in (index + 1)..(len + 1) {
            witness.fork_with(pruned(self.read_child_root_hash::<V>(i, true)));
        }

        witness.finish()
    }
}
952
953#[cfg(test)]
954mod tests {
955 use crate::collections::certified_btree_map::SCertifiedBTreeMap;
956 use crate::utils::certification::{
957 leaf, leaf_hash, merge_hash_trees, traverse_hashtree, AsHashTree, AsHashableBytes, Hash,
958 HashTree,
959 };
960 use crate::utils::test::generate_random_string;
961 use crate::{
962 _debug_validate_allocator, get_allocated_size, init_allocator, retrieve_custom_data,
963 stable, stable_memory_init, stable_memory_post_upgrade, stable_memory_pre_upgrade,
964 store_custom_data, SBox,
965 };
966 use ic_certified_map::RbTree;
967 use rand::rngs::ThreadRng;
968 use rand::seq::SliceRandom;
969 use rand::{thread_rng, Rng};
970 use std::borrow::Cow;
971
    // Test-only certification impl for u64 values: a value hashes as a leaf
    // of its little-endian bytes.
    impl AsHashTree for u64 {
        fn root_hash(&self) -> Hash {
            leaf_hash(&self.to_le_bytes())
        }

        fn hash_tree(&self) -> HashTree {
            leaf(self.to_le_bytes().to_vec())
        }
    }
981
    // Test-only key encoding for u64: little-endian bytes.
    impl AsHashableBytes for u64 {
        fn as_hashable_bytes(&self) -> Vec<u8> {
            self.to_le_bytes().to_vec()
        }
    }
987
    /// Inserts 1000 shuffled keys one by one (committing each time) and
    /// checks witnesses of all previously inserted keys after every step,
    /// then removes everything in a new random order with the same checks.
    #[test]
    fn random_works_fine() {
        stable::clear();
        stable_memory_init();

        {
            let iterations = 1000;
            let mut map = SCertifiedBTreeMap::<u64, u64>::default();

            let mut example = Vec::new();
            for i in 0..iterations {
                example.push(i as u64);
            }
            example.shuffle(&mut thread_rng());

            for i in 0..iterations {
                assert!(map.insert(example[i], example[i]).unwrap().is_none());
                map.inner.debug_print();

                map.modified.debug_print();
                map.commit();
                map.modified.debug_print();
                println!();

                // Every key inserted so far must produce a valid witness.
                for j in 0..i {
                    let wit = map.witness_with(&example[j], |it| leaf(it.as_hashable_bytes()));
                    assert_eq!(
                        wit.reconstruct(),
                        map.root_hash(),
                        "invalid witness {:?}",
                        wit
                    );
                }
            }

            assert_eq!(map.len(), iterations as u64);
            assert_eq!(map.is_empty(), false);

            map.debug_print();
            println!();
            println!();

            // Overwriting an existing key returns the previous value.
            assert_eq!(map.insert(0, 1).unwrap().unwrap(), 0);
            assert_eq!(map.insert(0, 0).unwrap().unwrap(), 1);

            example.shuffle(&mut thread_rng());
            for i in 0..iterations {
                assert_eq!(map.remove_and_commit(&example[i]), Some(example[i]));

                // All remaining keys must still verify.
                for j in (i + 1)..iterations {
                    let wit = map.witness_with(&example[j], |it| leaf(it.as_hashable_bytes()));
                    assert_eq!(
                        wit.reconstruct(),
                        map.root_hash(),
                        "invalid witness of {}: {:?}",
                        example[j],
                        wit
                    );
                }
            }

            map.debug_print();
        }

        // The map was dropped above; no stable memory may leak.
        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }
1055
1056 #[test]
1057 fn random_in_batches_works_fine() {
1058 stable::clear();
1059 stable_memory_init();
1060
1061 {
1062 let iterations = 10;
1063 let mut map = SCertifiedBTreeMap::<u64, u64>::default();
1064
1065 let mut example = Vec::new();
1066 for i in 0..(iterations * 100) {
1067 example.push(i as u64);
1068 }
1069 example.shuffle(&mut thread_rng());
1070
1071 for i in 0..iterations {
1072 for j in (i * 100)..((i + 1) * 100) {
1073 map.insert(example[j], example[j]);
1074 }
1075
1076 map.commit();
1077
1078 for j in 0..((i + 1) * 100) {
1079 let wit = map.witness_with(&example[j], |it| leaf(it.as_hashable_bytes()));
1080 assert_eq!(
1081 wit.reconstruct(),
1082 map.root_hash(),
1083 "invalid witness {:?}",
1084 wit
1085 );
1086 }
1087 }
1088
1089 for i in 0..iterations {
1090 for j in (i * 100)..((i + 1) * 100) {
1091 map.remove(&example[j]);
1092 }
1093
1094 map.commit();
1095
1096 for j in ((i + 1) * 100)..(iterations * 100) {
1097 let wit = map.witness_with(&example[j], |it| leaf(it.as_hashable_bytes()));
1098 assert_eq!(
1099 wit.reconstruct(),
1100 map.root_hash(),
1101 "invalid witness {:?}",
1102 wit
1103 );
1104 }
1105 }
1106 }
1107
1108 _debug_validate_allocator();
1109 assert_eq!(get_allocated_size(), 0);
1110 }
1111
1112 fn hash_tree_to_labeled_leaves(t: HashTree) -> Vec<HashTree> {
1113 let mut r = Vec::new();
1114
1115 let mut l = |it: &HashTree| {
1116 if let HashTree::Labeled(_, _) = it {
1117 r.push(it.clone())
1118 }
1119 };
1120
1121 traverse_hashtree(&t, &mut l);
1122
1123 r
1124 }
1125
1126 #[test]
1127 fn absence_proofs_work_fine() {
1128 stable::clear();
1129 stable_memory_init();
1130
1131 {
1132 let iterations = 100;
1133 let mut map = SCertifiedBTreeMap::<u64, u64>::default();
1134
1135 let proof = map.prove_absence(&0);
1136
1137 assert_eq!(
1138 proof.reconstruct(),
1139 map.root_hash(),
1140 "invalid proof {:?}",
1141 proof
1142 );
1143
1144 let leaves = hash_tree_to_labeled_leaves(proof);
1145 assert_eq!(leaves.len(), 0);
1146
1147 for i in 1..iterations {
1148 map.insert(i * 2, i * 2);
1149 }
1150
1151 map.commit();
1152 map.debug_print();
1153
1154 let proof = map.prove_absence(&0);
1155
1156 assert_eq!(
1157 proof.reconstruct(),
1158 map.root_hash(),
1159 "invalid proof {:?}",
1160 proof
1161 );
1162
1163 let leaves = hash_tree_to_labeled_leaves(proof);
1164 assert_eq!(leaves.len(), 1);
1165
1166 for i in 1..(iterations - 1) {
1167 let proof = map.prove_absence(&(i * 2 + 1));
1168
1169 assert_eq!(
1170 proof.reconstruct(),
1171 map.root_hash(),
1172 "invalid proof {:?}",
1173 proof
1174 );
1175
1176 let leaves = hash_tree_to_labeled_leaves(proof);
1177 assert_eq!(leaves.len(), 2);
1178 }
1179
1180 let proof = map.prove_absence(&300);
1181
1182 assert_eq!(
1183 proof.reconstruct(),
1184 map.root_hash(),
1185 "invalid proof {:?}",
1186 proof
1187 );
1188
1189 let leaves = hash_tree_to_labeled_leaves(proof);
1190 assert_eq!(leaves.len(), 1);
1191
1192 for i in 1..iterations {
1193 map.remove(&(i * 2));
1194 }
1195
1196 map.commit();
1197
1198 let proof = map.prove_absence(&0);
1199
1200 assert_eq!(
1201 proof.reconstruct(),
1202 map.root_hash(),
1203 "invalid proof {:?}",
1204 proof
1205 );
1206
1207 let leaves = hash_tree_to_labeled_leaves(proof);
1208 assert!(leaves.is_empty());
1209 }
1210
1211 _debug_validate_allocator();
1212 assert_eq!(get_allocated_size(), 0);
1213 }
1214
1215 #[test]
1216 fn merge_works_fine() {
1217 stable::clear();
1218 stable_memory_init();
1219
1220 #[derive(Debug, Default, Ord, PartialOrd, Eq, PartialEq, Copy, Clone)]
1221 struct U64(pub u64);
1222
1223 impl ic_certified_map::AsHashTree for U64 {
1224 fn as_hash_tree(&self) -> ic_certified_map::HashTree<'_> {
1225 ic_certified_map::HashTree::Leaf(Cow::Owned(self.0.to_le_bytes().to_vec()))
1226 }
1227
1228 fn root_hash(&self) -> ic_certified_map::Hash {
1229 ic_certified_map::leaf_hash(&self.0.to_le_bytes())
1230 }
1231 }
1232
1233 {
1234 let mut map = SCertifiedBTreeMap::<SBox<u64>, SBox<u64>>::default();
1235 let mut rb = RbTree::<[u8; 8], U64>::new();
1236
1237 for i in 0..100 {
1238 map.insert(SBox::new(i * 2).unwrap(), SBox::new(i * 2).unwrap())
1239 .unwrap();
1240
1241 rb.insert((i * 2).to_le_bytes(), U64(i * 2));
1242 }
1243
1244 map.commit();
1245
1246 let map_w1 = map.prove_absence(&11);
1247 let map_w2 = map.witness_with(&22, |val| leaf(val.as_hashable_bytes()));
1248
1249 assert_eq!(map_w1.reconstruct(), map.root_hash());
1250 assert_eq!(map_w2.reconstruct(), map.root_hash());
1251
1252 let w3 = merge_hash_trees(map_w1, map_w2);
1253 assert_eq!(w3.reconstruct(), map.root_hash());
1254
1255 let rb_w1 = rb.witness(&11u64.to_le_bytes());
1256 let rb_w2 = rb.witness(&22u64.to_le_bytes());
1257
1258 let rb_w3 = rb.key_range(&9u64.to_le_bytes(), &100u64.to_le_bytes());
1259 let map_w3 = map.prove_range(&9, &100);
1260 }
1261
1262 _debug_validate_allocator();
1263 assert_eq!(get_allocated_size(), 0);
1264 }
1265
1266 #[test]
1267 fn range_proofs_work_fine() {
1268 stable::clear();
1269 stable_memory_init();
1270
1271 {
1272 let iterations = 100;
1273 let mut map = SCertifiedBTreeMap::<u64, u64>::default();
1274
1275 for i in 0..iterations {
1276 map.insert(i, i);
1277 }
1278
1279 map.commit();
1280 map.debug_print();
1281
1282 for i in 0..iterations {
1283 for j in i..iterations {
1284 let proof = map.prove_range(&i, &j);
1285
1286 assert_eq!(
1287 proof.reconstruct(),
1288 map.root_hash(),
1289 "invalid proof {:?}",
1290 proof
1291 );
1292
1293 let leaves = hash_tree_to_labeled_leaves(proof);
1294 assert_eq!(leaves.len() as u64, j - i + 1, "{} {}", i, j);
1295 }
1296 }
1297 }
1298
1299 _debug_validate_allocator();
1300 assert_eq!(get_allocated_size(), 0);
1301 }
1302
1303 #[test]
1304 fn nested_maps_work_fine() {
1305 stable::clear();
1306 stable_memory_init();
1307
1308 {
1309 let mut map = SCertifiedBTreeMap::<u64, SCertifiedBTreeMap<u64, u64>>::default();
1310
1311 let mut nested_map_1 = SCertifiedBTreeMap::default();
1312 let mut nested_map_2 = SCertifiedBTreeMap::default();
1313
1314 nested_map_1.insert(1, 1);
1315 nested_map_1.commit();
1316
1317 nested_map_2.insert(2, 2);
1318 nested_map_2.commit();
1319
1320 map.insert(1, nested_map_1);
1321 map.insert(2, nested_map_2);
1322
1323 map.commit();
1324
1325 let composite_witness = map.witness_with(&1, |it| {
1326 it.witness_with(&1, |it1| leaf(it1.as_hashable_bytes()))
1327 });
1328
1329 assert_eq!(
1330 composite_witness.reconstruct(),
1331 map.root_hash(),
1332 "invalid witness {:?}",
1333 composite_witness
1334 );
1335
1336 let mut label_1_met = false;
1337 let mut label_2_met = false;
1338 let mut leave_met = false;
1339
1340 traverse_hashtree(&composite_witness, &mut |it| match it {
1341 HashTree::Labeled(l, _) => {
1342 assert!(1u64.as_hashable_bytes().eq(l));
1343
1344 if !label_1_met {
1345 label_1_met = true;
1346 } else if !label_2_met {
1347 label_2_met = true;
1348 } else {
1349 panic!("Extra label met");
1350 }
1351 }
1352 HashTree::Leaf(l) => {
1353 if !leave_met {
1354 leave_met = true;
1355 } else {
1356 panic!("Extra leave met");
1357 }
1358
1359 assert!(1u64.as_hashable_bytes().eq(l));
1360 }
1361 _ => {}
1362 });
1363
1364 assert!(label_1_met);
1365 assert!(label_2_met);
1366 assert!(leave_met);
1367 }
1368
1369 _debug_validate_allocator();
1370 assert_eq!(get_allocated_size(), 0);
1371 }
1372
    // Test-only certification impl for String values: a value hashes as a
    // leaf of its UTF-8 bytes.
    impl AsHashTree for String {
        fn root_hash(&self) -> Hash {
            leaf_hash(&self.as_hashable_bytes())
        }

        fn hash_tree(&self) -> HashTree {
            leaf(self.as_hashable_bytes())
        }
    }
1382
    // Test-only key encoding for String: raw UTF-8 bytes.
    impl AsHashableBytes for String {
        fn as_hashable_bytes(&self) -> Vec<u8> {
            self.as_bytes().to_vec()
        }
    }
1388
    /// Kinds of operations the fuzzer can perform; a log of them is kept to
    /// help reproduce failing runs.
    #[derive(Debug)]
    enum Action {
        Insert,
        Batch,
        Remove,
        Clear,
        CanisterUpgrade,
    }
1397
    struct Fuzzer {
        // `None` only transiently, while the map travels through a simulated
        // canister upgrade.
        map: Option<SCertifiedBTreeMap<SBox<String>, SBox<String>>>,
        // Sorted shadow copy of the keys currently present in the map.
        keys: Vec<String>,
        rng: ThreadRng,
        // History of performed actions, for debugging failed runs.
        log: Vec<Action>,
    }
1404
1405 impl Fuzzer {
1406 fn new() -> Fuzzer {
1407 Fuzzer {
1408 map: Some(SCertifiedBTreeMap::new()),
1409 keys: Vec::new(),
1410 rng: thread_rng(),
1411 log: Vec::new(),
1412 }
1413 }
1414
1415 fn map(&mut self) -> &mut SCertifiedBTreeMap<SBox<String>, SBox<String>> {
1416 self.map.as_mut().unwrap()
1417 }
1418
1419 fn next(&mut self) {
1420 let action = self.rng.gen_range(0..120);
1421
1422 match action {
1423 0..=59 => {
1425 let key = generate_random_string(&mut self.rng);
1426 let value = generate_random_string(&mut self.rng);
1427
1428 if let Ok(key_data) = SBox::new(key.clone()) {
1429 if let Ok(val_data) = SBox::new(value.clone()) {
1430 if self.map().insert_and_commit(key_data, val_data).is_err() {
1431 return;
1432 }
1433
1434 match self.keys.binary_search(&key) {
1435 Ok(idx) => {}
1436 Err(idx) => {
1437 self.keys.insert(idx, key);
1438 }
1439 };
1440
1441 self.log.push(Action::Insert);
1442 }
1443 }
1444 }
1445 60..=89 => {
1447 let len = self.map().len();
1448
1449 if len == 0 {
1450 return self.next();
1451 }
1452
1453 let idx: u64 = self.rng.gen_range(0..len);
1454 let key = self.keys.remove(idx as usize);
1455
1456 self.map().remove_and_commit(&key).unwrap();
1457
1458 self.log.push(Action::Remove);
1459 }
1460 90..=99 => match SBox::new(self.map.take().unwrap()) {
1462 Ok(data) => {
1463 store_custom_data(1, data);
1464
1465 if stable_memory_pre_upgrade().is_ok() {
1466 stable_memory_post_upgrade();
1467 }
1468
1469 self.map = retrieve_custom_data::<
1470 SCertifiedBTreeMap<SBox<String>, SBox<String>>,
1471 >(1)
1472 .map(|it| it.into_inner());
1473
1474 self.log.push(Action::CanisterUpgrade);
1475 }
1476 Err(map) => {
1477 self.map = Some(map);
1478 }
1479 },
1480 100..=101 => {
1481 self.map().clear();
1482 self.keys.clear();
1483
1484 self.log.push(Action::Clear);
1485 }
1486 _ => {
1488 let count = self.rng.gen_range(0..10);
1489
1490 for i in 0..count {
1491 let act = self.rng.gen_range(0..10);
1492 match act {
1493 0..=7 => {
1494 let key = generate_random_string(&mut self.rng);
1495 let value = generate_random_string(&mut self.rng);
1496
1497 if let Ok(key_data) = SBox::new(key.clone()) {
1498 if let Ok(val_data) = SBox::new(value.clone()) {
1499 if self.map().insert(key_data, val_data).is_err() {
1500 continue;
1501 }
1502
1503 match self.keys.binary_search(&key) {
1504 Ok(idx) => {}
1505 Err(idx) => {
1506 self.keys.insert(idx, key);
1507 }
1508 };
1509 }
1510 }
1511 }
1512 _ => {
1513 let len = self.map().len();
1514
1515 if len == 0 {
1516 continue;
1517 }
1518
1519 let idx: u64 = self.rng.gen_range(0..len);
1520 let key = self.keys.remove(idx as usize);
1521
1522 self.map().remove(&key).unwrap();
1523 }
1524 }
1525 }
1526
1527 self.map().commit();
1528 self.log.push(Action::Batch);
1529 }
1530 }
1531
1532 _debug_validate_allocator();
1533
1534 let root_hash = self.map().root_hash();
1535
1536 for key in self.keys.clone() {
1537 let witness = self
1538 .map()
1539 .witness_with(&key, |it| leaf(it.as_hashable_bytes()));
1540
1541 assert_eq!(witness.reconstruct(), root_hash);
1542 }
1543
1544 for _ in 0..10 {
1545 let k = generate_random_string(&mut self.rng);
1546 let witness = self.map().prove_absence(&k);
1547
1548 assert_eq!(witness.reconstruct(), root_hash);
1549
1550 let k1 = generate_random_string(&mut self.rng);
1551
1552 let (max, min) = if k > k1 { (k, k1) } else { (k1, k) };
1553
1554 let witness = self.map().prove_range(&min, &max);
1555 assert_eq!(witness.reconstruct(), root_hash);
1556 }
1557 }
1558 }
1559
    /// Runs 500 random fuzzer steps against an unconstrained allocator.
    #[test]
    fn fuzzer_works_fine() {
        stable::clear();
        init_allocator(0);

        {
            let mut fuzzer = Fuzzer::new();

            for _ in 0..500 {
                fuzzer.next();
            }
        }

        // Everything was dropped; no stable memory may leak.
        assert_eq!(get_allocated_size(), 0);
    }
1575
    /// Runs 1000 random fuzzer steps with a constrained allocator
    /// (`init_allocator(1)`), so out-of-memory recovery paths are exercised.
    #[test]
    fn fuzzer_works_fine_limited_memory() {
        stable::clear();
        init_allocator(1);

        {
            let mut fuzzer = Fuzzer::new();

            for _ in 0..1000 {
                fuzzer.next();
            }
        }

        // Everything was dropped; no stable memory may leak.
        assert_eq!(get_allocated_size(), 0);
    }
1591}