use crate::collections::hash_map::iter::SHashMapIter;
use crate::encoding::{AsFixedSizeBytes, Buffer};
use crate::mem::allocator::EMPTY_PTR;
use crate::mem::StablePtr;
use crate::primitive::s_ref::SRef;
use crate::primitive::s_ref_mut::SRefMut;
use crate::primitive::StableType;
use crate::utils::DebuglessUnwrap;
use crate::{allocate, deallocate, OutOfMemory, SSlice};
use std::borrow::Borrow;
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use zwohash::ZwoHasher;

#[doc(hidden)]
pub mod iter;

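// Memory layout of the table allocated at `table_ptr`, as implied by the
// offset helpers below:
//
//   keys region   (KEYS_OFFSET = 0):     `capacity` slots of [flag: u8][key: K::SIZE bytes]
//   values region (values_offset::<K>):  `capacity` slots of [value: V::SIZE bytes]
//
// Each slot's flag byte is either EMPTY (0) or OCCUPIED (255).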
const KEYS_OFFSET: usize = 0;

#[inline]
const fn values_offset<K: AsFixedSizeBytes>(capacity: usize) -> usize {
    KEYS_OFFSET + (1 + K::SIZE) * capacity
}

const DEFAULT_CAPACITY: usize = 7;

const EMPTY: u8 = 0;
const OCCUPIED: u8 = 255;

type KeyHash = usize;

pub struct SHashMap<K: StableType + AsFixedSizeBytes + Hash + Eq, V: StableType + AsFixedSizeBytes>
{
    table_ptr: u64,
    len: usize,
    cap: usize,
    stable_drop_flag: bool,
    _marker_k: PhantomData<K>,
    _marker_v: PhantomData<V>,
}

impl<K: StableType + AsFixedSizeBytes + Hash + Eq, V: StableType + AsFixedSizeBytes>
    SHashMap<K, V>
{
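    // A minimal usage sketch, mirroring the tests at the bottom of this file
    // (it assumes stable memory has already been initialized):
    //
    //     let mut map = SHashMap::new();
    //     assert!(map.insert(1, 10).unwrap().is_none());
    //     assert_eq!(*map.get(&1).unwrap(), 10);
    //     assert_eq!(map.remove(&1).unwrap(), 10);
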
    #[inline]
    pub fn new() -> Self {
        Self {
            table_ptr: EMPTY_PTR,
            len: 0,
            cap: DEFAULT_CAPACITY,
            stable_drop_flag: true,
            _marker_k: PhantomData::default(),
            _marker_v: PhantomData::default(),
        }
    }

    pub fn new_with_capacity(capacity: usize) -> Result<Self, OutOfMemory> {
        assert!(capacity <= Self::max_capacity());

        let size = (1 + K::SIZE + V::SIZE) * capacity;
        let table = unsafe { allocate(size as u64)? };

        let zeroed = vec![0u8; size];
        unsafe { crate::mem::write_bytes(table.offset(0), &zeroed) };

        Ok(Self {
            table_ptr: table.as_ptr(),
            len: 0,
            cap: capacity,
            stable_drop_flag: true,
            _marker_k: PhantomData::default(),
            _marker_v: PhantomData::default(),
        })
    }

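    // `insert` resolves collisions with linear probing: the probe starts at
    // `hash(key) % capacity` and advances one slot at a time (wrapping around)
    // until it finds either the same key or an EMPTY slot. For example, with
    // capacity 7 and a key hashing to slot 5, the probe sequence is 5, 6, 0, 1, ...
    // Once the load factor threshold is reached (see `is_full`), the entries are
    // rehashed into a new table of roughly double the capacity.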
    pub fn insert(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
        // Lazily allocate the zero-initialized table on the first insert.
        if self.table_ptr == EMPTY_PTR {
            let size = (1 + K::SIZE + V::SIZE) * self.capacity();
            if let Ok(table) = unsafe { allocate(size as u64) } {
                let zeroed = vec![0u8; size];
                unsafe { crate::mem::write_bytes(table.offset(0), &zeroed) };

                self.table_ptr = table.as_ptr();
            } else {
                return Err((key, value));
            }
        }

        let key_hash = Self::hash(&key);
        let mut i = key_hash % self.capacity();

        loop {
            match self.get_key(i) {
                Some(prev_key) => {
                    if (*prev_key).eq(&key) {
                        // The key is already present: replace the value in place.
                        let prev_value = self.read_and_disown_val(i);
                        self.write_and_own_val(i, value);

                        return Ok(Some(prev_value));
                    } else {
                        // Collision: linearly probe the next slot.
                        i = (i + 1) % self.capacity();

                        continue;
                    }
                }
                None => {
                    if self.is_full() {
                        // Load factor threshold reached: rehash into a table of
                        // roughly double the capacity, then retry the insert there.
                        if let Ok(mut new) =
                            Self::new_with_capacity(self.capacity().checked_mul(2).unwrap() - 1)
                        {
                            for i in 0..self.cap {
                                if let Some(k) = self.read_and_disown_key(i) {
                                    let v = self.read_and_disown_val(i);

                                    new.insert(k, v).debugless_unwrap();
                                }
                            }

                            let res = new.insert(key, value).debugless_unwrap();
                            let slice = unsafe { SSlice::from_ptr(self.table_ptr).unwrap() };
                            deallocate(slice);

                            unsafe { self.stable_drop_flag_off() };

                            *self = new;

                            return Ok(res);
                        } else {
                            return Err((key, value));
                        }
                    }

                    self.write_and_own_key(i, Some(key));
                    self.write_and_own_val(i, value);

                    self.len += 1;

                    return Ok(None);
                }
            }
        }
    }

    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        Some(self.remove_by_idx(self.find_inner_idx(key)?))
    }

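    // `get` and `get_mut` do not copy the value out of stable memory: they
    // return `SRef`/`SRefMut` handles built from the value's stable pointer
    // (see `get_val`/`get_val_mut` below).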
    #[inline]
    pub fn get<Q>(&self, key: &Q) -> Option<SRef<V>>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        Some(self.get_val(self.find_inner_idx(key)?))
    }

    #[inline]
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<SRefMut<V>>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        Some(self.get_val_mut(self.find_inner_idx(key)?))
    }

    #[inline]
    pub fn contains_key<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.find_inner_idx(key).is_some()
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    #[inline]
    pub const fn capacity(&self) -> usize {
        self.cap
    }

    #[inline]
    pub const fn max_capacity() -> usize {
        u32::MAX as usize / (K::SIZE + V::SIZE)
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    // The table counts as full at a ~3/4 load factor (`capacity / 4 * 3` entries).
    #[inline]
    pub const fn is_full(&self) -> bool {
        self.len() == (self.capacity() >> 2) * 3
    }

    #[inline]
    pub fn iter(&self) -> SHashMapIter<K, V> {
        SHashMapIter::new(self)
    }

    pub fn clear(&mut self) {
        if self.is_empty() {
            return;
        }

        for i in 0..self.cap {
            // Reading the key and value "for move" takes ownership, so dropping
            // the bindings at the end of this block also stable-drops their data.
            if let Some(_k) = self.read_and_disown_key(i) {
                let _v = self.read_and_disown_val(i);

                self.write_and_own_key(i, None);
            }
        }

        self.len = 0;
    }

    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&K, &V) -> bool,
    {
        if self.is_empty() {
            return;
        }

        for i in 0..self.cap {
            if let Some(mut k) = self.read_and_disown_key(i) {
                let mut v = self.read_and_disown_val(i);
                if f(&k, &v) {
                    // Keep the entry: switch the drop flags off so the stable
                    // data is not freed when the local `k` and `v` are dropped.
                    unsafe {
                        k.stable_drop_flag_off();
                        v.stable_drop_flag_off();
                    }

                    continue;
                }

                self.write_and_own_key(i, None);
                self.len -= 1;
            }
        }
    }

    fn hash<T: Hash + ?Sized>(val: &T) -> KeyHash {
        let mut hasher = ZwoHasher::default();
        val.hash(&mut hasher);

        hasher.finish() as KeyHash
    }

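    // `remove_by_idx` performs backward-shift deletion, the standard removal
    // scheme for linear probing: after emptying slot `i`, it walks the probe
    // chain and shifts back every entry whose home slot (`hash % capacity`)
    // does not lie cyclically in `(i, j]`, so later lookups never stop at a
    // spurious EMPTY slot in the middle of a chain.
    //
    // Illustrative example with capacity 7: keys with home slots 3, 3 and 4
    // occupy slots 3, 4 and 5. Removing the key in slot 3 shifts the slot-4
    // entry into slot 3 and the slot-5 entry into slot 4, leaving slot 5 EMPTY.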
    fn remove_by_idx(&mut self, idx: usize) -> V {
        let prev_value = self.read_and_disown_val(idx);
        self.read_and_disown_key(idx).unwrap();

        let mut i = idx;
        let mut j = idx;

        loop {
            j = (j + 1) % self.capacity();
            if j == idx {
                break;
            }

            if let Some(next_key) = self.read_key_for_reference(j) {
                let k = Self::hash(&next_key) % self.capacity();

                // Shift the entry at `j` back into the hole at `i` only if its
                // home slot `k` does not lie cyclically in the range `(i, j]`.
                if (j < i) ^ (k <= i) ^ (k > j) {
                    self.write_and_own_key(i, Some(next_key));
                    self.write_and_own_val(i, self.read_and_disown_val(j));

                    i = j;
                }

                continue;
            }

            break;
        }

        self.write_and_own_key(i, None);
        self.len -= 1;

        prev_value
    }

    fn find_inner_idx<Q>(&self, key: &Q) -> Option<usize>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if self.is_empty() {
            return None;
        }

        let key_hash = Self::hash(key);
        let mut i = key_hash % self.capacity();

        loop {
            // Probing stops at the first EMPTY slot (`get_key` returns `None`).
            if (*self.get_key(i)?).borrow().eq(key) {
                return Some(i);
            } else {
                i = (i + 1) % self.capacity();
            }
        }
    }

    fn get_key(&self, idx: usize) -> Option<SRef<K>> {
        let ptr = self.get_key_flag_ptr(idx);
        let flag: u8 = unsafe { crate::mem::read_fixed_for_reference(ptr) };

        match flag {
            EMPTY => None,
            OCCUPIED => Some(unsafe { SRef::new(ptr + 1) }),
            _ => unreachable!(),
        }
    }

    fn read_and_disown_key(&self, idx: usize) -> Option<K> {
        let ptr = self.get_key_flag_ptr(idx);
        let flag: u8 = unsafe { crate::mem::read_fixed_for_reference(ptr) };

        match flag {
            EMPTY => None,
            OCCUPIED => Some(unsafe { crate::mem::read_fixed_for_move(ptr + 1) }),
            _ => unreachable!(),
        }
    }

    fn read_key_for_reference(&self, idx: usize) -> Option<K> {
        let ptr = self.get_key_flag_ptr(idx);
        let flag: u8 = unsafe { crate::mem::read_fixed_for_reference(ptr) };

        match flag {
            EMPTY => None,
            OCCUPIED => Some(unsafe { crate::mem::read_fixed_for_reference(ptr + 1) }),
            _ => unreachable!(),
        }
    }

    fn write_and_own_key(&mut self, idx: usize, key: Option<K>) {
        let ptr = self.get_key_flag_ptr(idx);

        if let Some(mut k) = key {
            unsafe { crate::mem::write_fixed(ptr, &mut OCCUPIED) };
            unsafe { crate::mem::write_fixed(ptr + 1, &mut k) };

            return;
        }

        unsafe { crate::mem::write_fixed(ptr, &mut EMPTY) };
    }

    #[inline]
    fn get_val(&self, idx: usize) -> SRef<V> {
        unsafe { SRef::new(self.get_value_ptr(idx)) }
    }

    #[inline]
    fn get_val_mut(&self, idx: usize) -> SRefMut<V> {
        unsafe { SRefMut::new(self.get_value_ptr(idx)) }
    }

    #[inline]
    fn read_and_disown_val(&self, idx: usize) -> V {
        unsafe { crate::mem::read_fixed_for_move(self.get_value_ptr(idx)) }
    }

    #[inline]
    fn write_and_own_val(&mut self, idx: usize, mut val: V) {
        unsafe { crate::mem::write_fixed(self.get_value_ptr(idx), &mut val) }
    }

    #[inline]
    fn get_value_ptr(&self, idx: usize) -> StablePtr {
        SSlice::_offset(
            self.table_ptr,
            (values_offset::<K>(self.capacity()) + V::SIZE * idx) as u64,
        )
    }

    #[inline]
    fn get_key_flag_ptr(&self, idx: usize) -> StablePtr {
        SSlice::_offset(self.table_ptr, (KEYS_OFFSET + (1 + K::SIZE) * idx) as u64)
    }

    #[inline]
    fn get_key_data_ptr(&self, idx: usize) -> StablePtr {
        SSlice::_offset(
            self.table_ptr,
            (KEYS_OFFSET + (1 + K::SIZE) * idx + 1) as u64,
        )
    }

    pub fn debug_print(&self) {
        print!("Node({}, {})[", self.len(), self.capacity());
        for i in 0..self.capacity() {
            let k_flag: u8 =
                unsafe { crate::mem::read_fixed_for_reference(self.get_key_flag_ptr(i)) };
            let mut k_buf = K::Buf::new(K::SIZE);
            let mut v_buf = V::Buf::new(V::SIZE);

            unsafe { crate::mem::read_bytes(self.get_key_data_ptr(i), k_buf._deref_mut()) };
            unsafe { crate::mem::read_bytes(self.get_value_ptr(i), v_buf._deref_mut()) };

            print!("(");

            match k_flag {
                EMPTY => print!("<empty> = "),
                OCCUPIED => print!("<occupied> = "),
                _ => unreachable!(),
            };

            print!("{:?}, {:?})", k_buf._deref(), v_buf._deref());

            if i < self.capacity() - 1 {
                print!(", ");
            }
        }
        println!("]");
    }
}

impl<
        K: StableType + AsFixedSizeBytes + Hash + Eq + Debug,
        V: StableType + AsFixedSizeBytes + Debug,
    > Debug for SHashMap<K, V>
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("{")?;
        for (idx, (k, v)) in self.iter().enumerate() {
            k.fmt(f)?;
            f.write_str(": ")?;
            v.fmt(f)?;

            if idx < self.len() - 1 {
                f.write_str(", ")?;
            }
        }
        f.write_str("}")
    }
}

impl<K: StableType + AsFixedSizeBytes + Hash + Eq, V: StableType + AsFixedSizeBytes> Default
    for SHashMap<K, V>
{
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

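// Fixed-size serialization of the map header, as implemented below:
//
//   [ table_ptr: u64 ][ len: usize ][ cap: usize ]
//
// Note that `from_fixed_size_bytes` restores the handle with `stable_drop_flag`
// set to `false`, so a freshly deserialized copy does not deallocate the table
// when it is dropped.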
impl<K: StableType + AsFixedSizeBytes + Hash + Eq, V: StableType + AsFixedSizeBytes>
    AsFixedSizeBytes for SHashMap<K, V>
{
    const SIZE: usize = u64::SIZE + usize::SIZE * 2;
    type Buf = [u8; u64::SIZE + usize::SIZE * 2];

    fn as_fixed_size_bytes(&self, buf: &mut [u8]) {
        self.table_ptr.as_fixed_size_bytes(&mut buf[0..u64::SIZE]);
        self.len
            .as_fixed_size_bytes(&mut buf[u64::SIZE..(usize::SIZE + u64::SIZE)]);
        self.cap.as_fixed_size_bytes(
            &mut buf[(usize::SIZE + u64::SIZE)..(usize::SIZE * 2 + u64::SIZE)],
        );
    }

    fn from_fixed_size_bytes(buf: &[u8]) -> Self {
        let table_ptr = u64::from_fixed_size_bytes(&buf[0..u64::SIZE]);
        let len = usize::from_fixed_size_bytes(&buf[u64::SIZE..(usize::SIZE + u64::SIZE)]);
        let cap = usize::from_fixed_size_bytes(
            &buf[(usize::SIZE + u64::SIZE)..(usize::SIZE * 2 + u64::SIZE)],
        );

        Self {
            table_ptr,
            len,
            cap,
            stable_drop_flag: false,
            _marker_k: PhantomData::default(),
            _marker_v: PhantomData::default(),
        }
    }
}

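// `stable_drop_flag` drives the ownership protocol of `StableType`: only a
// handle whose flag is on (`should_stable_drop`) frees the table in `Drop`.
// The flag is switched off once the table has been handed over elsewhere, e.g.
// after growing in `insert`; deserialized copies start with the flag off.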
impl<K: StableType + AsFixedSizeBytes + Hash + Eq, V: StableType + AsFixedSizeBytes> StableType
    for SHashMap<K, V>
{
    #[inline]
    unsafe fn stable_drop_flag_off(&mut self) {
        self.stable_drop_flag = false;
    }

    #[inline]
    unsafe fn stable_drop_flag_on(&mut self) {
        self.stable_drop_flag = true;
    }

    #[inline]
    fn should_stable_drop(&self) -> bool {
        self.stable_drop_flag
    }

    unsafe fn stable_drop(&mut self) {
        if self.table_ptr != EMPTY_PTR {
            self.clear();

            let slice = SSlice::from_ptr(self.table_ptr).unwrap();
            deallocate(slice);
        }
    }
}

impl<K: StableType + AsFixedSizeBytes + Hash + Eq, V: StableType + AsFixedSizeBytes> Drop
    for SHashMap<K, V>
{
    fn drop(&mut self) {
        if self.should_stable_drop() {
            unsafe {
                self.stable_drop();
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::collections::hash_map::SHashMap;
    use crate::encoding::AsFixedSizeBytes;
    use crate::primitive::s_box::SBox;
    use crate::primitive::StableType;
    use crate::utils::mem_context::stable;
    use crate::utils::test::generate_random_string;
    use crate::utils::DebuglessUnwrap;
    use crate::{
        _debug_validate_allocator, get_allocated_size, init_allocator, retrieve_custom_data,
        stable_memory_init, stable_memory_post_upgrade, stable_memory_pre_upgrade,
        store_custom_data,
    };
    use rand::rngs::ThreadRng;
    use rand::seq::SliceRandom;
    use rand::{thread_rng, Rng};
    use std::collections::HashMap;
    use std::ops::Deref;

    #[test]
    fn simple_flow_works_well() {
        stable::clear();
        stable_memory_init();

        {
            let mut map = SHashMap::new_with_capacity(3).debugless_unwrap();

            let k1 = 1;
            let k2 = 2;
            let k3 = 3;
            let k4 = 4;
            let k5 = 5;
            let k6 = 6;
            let k7 = 7;
            let k8 = 8;

            map.insert(k1, 1);
            map.insert(k2, 2);
            map.insert(k3, 3);
            map.insert(k4, 4);
            map.insert(k5, 5);
            map.insert(k6, 6);
            map.insert(k7, 7);
            map.insert(k8, 8);

            assert_eq!(*map.get(&k1).unwrap(), 1);
            assert_eq!(*map.get(&k2).unwrap(), 2);
            assert_eq!(*map.get(&k3).unwrap(), 3);
            assert_eq!(*map.get(&k4).unwrap(), 4);
            assert_eq!(*map.get(&k5).unwrap(), 5);
            assert_eq!(*map.get(&k6).unwrap(), 6);
            assert_eq!(*map.get(&k7).unwrap(), 7);
            assert_eq!(*map.get(&k8).unwrap(), 8);

            assert!(map.get(&9).is_none());
            assert!(map.get(&0).is_none());

            assert_eq!(map.remove(&k3).unwrap(), 3);
            assert!(map.get(&k3).is_none());

            assert_eq!(map.remove(&k1).unwrap(), 1);
            assert!(map.get(&k1).is_none());

            assert_eq!(map.remove(&k5).unwrap(), 5);
            assert!(map.get(&k5).is_none());

            assert_eq!(map.remove(&k7).unwrap(), 7);
            assert!(map.get(&k7).is_none());

            assert_eq!(*map.get(&k2).unwrap(), 2);
            assert_eq!(*map.get(&k4).unwrap(), 4);
            assert_eq!(*map.get(&k6).unwrap(), 6);
            assert_eq!(*map.get(&k8).unwrap(), 8);
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn basic_flow_works_fine() {
        stable::clear();
        stable_memory_init();

        {
            let mut map = SHashMap::new_with_capacity(3).debugless_unwrap();

            assert!(map.remove(&10).is_none());
            assert!(map.get(&10).is_none());

            let it = map.insert(1, 1).unwrap();
            assert!(it.is_none());
            assert!(map.insert(2, 2).unwrap().is_none());
            assert!(map.insert(3, 3).unwrap().is_none());
            assert_eq!(map.insert(1, 10).unwrap().unwrap(), 1);

            assert!(map.remove(&5).is_none());
            assert_eq!(map.remove(&1).unwrap(), 10);

            assert!(map.contains_key(&2));
            assert!(!map.contains_key(&5));

            map.debug_print();

            let mut map = SHashMap::default();
            for i in 0..100 {
                assert!(map.insert(i, i).unwrap().is_none());
            }

            for i in 0..100 {
                assert_eq!(*map.get(&i).unwrap(), i);
            }

            for i in 0..100 {
                assert_eq!(map.remove(&(99 - i)).unwrap(), 99 - i);
            }
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn removes_work() {
        stable::clear();
        stable_memory_init();

        {
            let mut map = SHashMap::new();

            for i in 0..500 {
                map.insert(499 - i, i);
            }

            let mut vec = (200..300).collect::<Vec<_>>();
            vec.shuffle(&mut thread_rng());

            for i in vec {
                map.remove(&i);
            }

            for i in 500..5000 {
                map.insert(i, i);
            }

            for i in 200..300 {
                map.insert(i, i);
            }

            let mut vec = (0..5000).collect::<Vec<_>>();
            vec.shuffle(&mut thread_rng());

            for i in vec {
                map.remove(&i);
            }
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn serialization_work_fine() {
        stable::clear();
        stable_memory_init();

        {
            let mut map = SHashMap::new();
            map.insert(0, 0);

            let len = map.len();
            let cap = map.capacity();
            let ptr = map.table_ptr;

            let buf = map.as_new_fixed_size_bytes();

            let map1 = SHashMap::<i32, i32>::from_fixed_size_bytes(&buf);

            assert_eq!(len, map1.len());
            assert_eq!(cap, map1.capacity());
            assert_eq!(ptr, map1.table_ptr);
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn iter_works_fine() {
        stable::clear();
        stable_memory_init();

        {
            let mut map = SHashMap::new();
            for i in 0..100 {
                map.insert(i, i);
            }

            let mut c = 0;
            for (k, _) in map.iter() {
                c += 1;

                assert!(*k < 100);
            }

            assert_eq!(c, 100);
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn sboxes_work_fine() {
        stable::clear();
        stable_memory_init();

        {
            let mut map = SHashMap::new();

            for i in 0..100 {
                map.insert(SBox::new(i).unwrap(), i).unwrap();
            }

            println!("sbox mut");
            let mut map = SHashMap::new();

            for i in 0..100 {
                map.insert(SBox::new(i).unwrap(), i).unwrap();
            }
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    #[derive(Debug)]
    enum Action {
        Insert,
        Remove,
        Clear,
        CanisterUpgrade,
    }

    struct Fuzzer {
        map: Option<SHashMap<SBox<String>, SBox<String>>>,
        example: HashMap<String, String>,
        keys: Vec<String>,
        rng: ThreadRng,
        log: Vec<Action>,
    }

    impl Fuzzer {
        fn new() -> Fuzzer {
            Fuzzer {
                map: Some(SHashMap::new()),
                example: HashMap::new(),
                keys: Vec::new(),
                rng: thread_rng(),
                log: Vec::new(),
            }
        }

        fn map(&mut self) -> &mut SHashMap<SBox<String>, SBox<String>> {
            self.map.as_mut().unwrap()
        }

        fn next(&mut self) {
            let action = self.rng.gen_range(0..101);

            match action {
                0..=59 => {
                    let key = generate_random_string(&mut self.rng);
                    let value = generate_random_string(&mut self.rng);

                    if let Ok(key_data) = SBox::new(key.clone()) {
                        if let Ok(val_data) = SBox::new(value.clone()) {
                            if self.map().insert(key_data, val_data).is_err() {
                                return;
                            }
                            self.example.insert(key.clone(), value);

                            match self.keys.binary_search(&key) {
                                Ok(_) => {}
                                Err(idx) => {
                                    self.keys.insert(idx, key);
                                }
                            };

                            self.log.push(Action::Insert);
                        }
                    }
                }
                60..=89 => {
                    let len = self.map().len();

                    if len == 0 {
                        return self.next();
                    }

                    let idx = self.rng.gen_range(0..len);
                    let key = self.keys.remove(idx);

                    self.map().remove(&key).unwrap();
                    self.example.remove(&key).unwrap();

                    self.log.push(Action::Remove);
                }
                90..=91 => {
                    self.map().clear();
                    self.example.clear();
                    self.keys.clear();

                    self.log.push(Action::Clear);
                }
                _ => match SBox::new(self.map.take().unwrap()) {
                    Ok(data) => {
                        store_custom_data(1, data);

                        if stable_memory_pre_upgrade().is_ok() {
                            stable_memory_post_upgrade();
                        }

                        self.map = retrieve_custom_data::<SHashMap<SBox<String>, SBox<String>>>(1)
                            .map(|it| it.into_inner());

                        self.log.push(Action::CanisterUpgrade);
                    }
                    Err(map) => {
                        self.map = Some(map);
                    }
                },
            }

            _debug_validate_allocator();
            assert_eq!(self.map().len(), self.example.len());

            for key in self.keys.clone() {
                let contains = self.map().contains_key(&key);
                assert!(contains);

                assert_eq!(
                    self.map().get(&key).unwrap().clone(),
                    self.example.get(&key).unwrap().clone()
                );
            }
        }
    }

    #[test]
    fn fuzzer_works_fine() {
        stable::clear();
        init_allocator(0);

        {
            let mut fuzzer = Fuzzer::new();

            for _ in 0..10_000 {
                fuzzer.next();
            }
        }

        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn fuzzer_works_fine_limited_memory() {
        stable::clear();
        init_allocator(10);

        {
            let mut fuzzer = Fuzzer::new();

            for _ in 0..10_000 {
                fuzzer.next();
            }
        }

        assert_eq!(get_allocated_size(), 0);
    }
}