ic_stable_memory/collections/log/
mod.rs

use crate::collections::log::iter::SLogIter;
use crate::encoding::AsFixedSizeBytes;
use crate::mem::allocator::EMPTY_PTR;
use crate::mem::StablePtr;
use crate::primitive::s_ref::SRef;
use crate::primitive::s_ref_mut::SRefMut;
use crate::primitive::StableType;
use crate::{allocate, deallocate, OutOfMemory, SSlice};
use std::fmt::Debug;
use std::marker::PhantomData;

#[doc(hidden)]
pub mod iter;

pub(crate) const DEFAULT_CAPACITY: u64 = 2;

/// Non-reallocating growing vector optimized for storing logs or history entries
///
/// Very similar to [SVec](crate::collections::SVec), but internally it performs no reallocations
/// and no moves of data. Instead, when the capacity is reached, a new block of stable memory (here
/// called a `Sector`) is allocated and attached to the end of the current block. In that sense,
/// this is a linked list where each node can hold multiple elements.
///
/// `T` has to implement both [StableType] and [AsFixedSizeBytes]. [SLog] itself also implements
/// these traits, which means that you can store it inside another stable structure.
///
/// This data structure is "infinite" - it can handle up to [u64::MAX] elements.
///
/// [SLog] grows exponentially - each new `Sector` is twice as big as the previous one. But if the
/// canister is short on stable memory, the newly created `Sector` may be shrunk, so the collection
/// can continue to grow.
///
/// It is well optimized for accessing elements near the end (the most recently added). The further
/// from the end the accessed element is, the worse the performance.
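///
/// # Example
/// A minimal usage sketch (stable memory has to be initialized first, as in the method examples
/// below):
/// ```rust
/// # use ic_stable_memory::collections::SLog;
/// # use ic_stable_memory::stable_memory_init;
/// # unsafe { ic_stable_memory::mem::clear(); }
/// # stable_memory_init();
/// let mut log = SLog::new();
///
/// log.push(1u64).expect("Out of memory");
/// log.push(2u64).expect("Out of memory");
///
/// assert_eq!(log.len(), 2);
/// assert_eq!(*log.get(0).unwrap(), 1);
/// assert_eq!(log.pop(), Some(2));
/// ```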
pub struct SLog<T: StableType + AsFixedSizeBytes> {
    /// Total number of elements in the whole [SLog]
    len: u64,
    /// Pointer to the first (oldest) `Sector`
    first_sector_ptr: StablePtr,
    /// Pointer to the current (newest) `Sector`
    cur_sector_ptr: StablePtr,
    /// Byte offset within the current `Sector` where the next element will be written
    cur_sector_last_item_offset: u64,
    /// Capacity (in elements) of the current `Sector`
    cur_sector_capacity: u64,
    /// Number of elements stored in the current `Sector`
    cur_sector_len: u64,
    stable_drop_flag: bool,
    _marker: PhantomData<T>,
}

impl<T: StableType + AsFixedSizeBytes> SLog<T> {
    /// Creates a new [SLog]
    ///
    /// Does not allocate any heap or stable memory.
    #[inline]
    pub fn new() -> Self {
        Self {
            len: 0,
            first_sector_ptr: EMPTY_PTR,
            cur_sector_ptr: EMPTY_PTR,
            cur_sector_last_item_offset: 0,
            cur_sector_capacity: DEFAULT_CAPACITY,
            cur_sector_len: 0,
            stable_drop_flag: true,
            _marker: PhantomData::default(),
        }
    }

    /// Inserts a new element at the end of the [SLog]
    ///
    /// May allocate a new `Sector`. If the canister is out of stable memory, will return [Err] with
    /// the element that was about to get inserted.
    ///
    /// # Example
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// log.push(10u64).expect("Out of memory");
    /// ```
    pub fn push(&mut self, it: T) -> Result<(), T> {
        if let Ok(mut sector) = self.get_or_create_current_sector() {
            if self.move_to_next_sector_if_needed(&mut sector).is_ok() {
                sector.write_and_own_element(self.cur_sector_last_item_offset, it);
                self.cur_sector_last_item_offset += T::SIZE as u64;
                self.cur_sector_len += 1;
                self.len += 1;

                Ok(())
            } else {
                Err(it)
            }
        } else {
            Err(it)
        }
    }

    /// Removes an element from the end of the [SLog]
    ///
    /// If the [SLog] is empty, returns [None]. If the popped element was the last one in the current
    /// `Sector` and there are more `Sector`s before it, that `Sector` gets deallocated, freeing the memory.
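    ///
    /// # Example
    /// A minimal sketch, following the same setup as the `push` example:
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// log.push(10u64).expect("Out of memory");
    ///
    /// assert_eq!(log.pop(), Some(10));
    /// assert_eq!(log.pop(), None);
    /// ```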
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }

        let sector = self.get_current_sector()?;

        self.cur_sector_last_item_offset -= T::SIZE as u64;
        self.cur_sector_len -= 1;
        self.len -= 1;

        let it = sector.read_and_disown_element(self.cur_sector_last_item_offset);

        self.move_to_prev_sector_if_needed(sector);

        Some(it)
    }

    /// Removes all elements from this [SLog]
    ///
    /// Deallocates all `Sector`s except the first one, freeing the memory.
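    ///
    /// # Example
    /// A minimal sketch, following the same setup as the `push` example:
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// log.push(10u64).expect("Out of memory");
    /// log.clear();
    ///
    /// assert!(log.is_empty());
    /// ```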
    #[inline]
    pub fn clear(&mut self) {
        while self.pop().is_some() {}
    }

    /// Returns an immutable reference [SRef] to the last element of this [SLog]
    ///
    /// If the [SLog] is empty, returns [None].
    ///
    /// # Example
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// log.push(10u64).expect("Out of memory");
    ///
    /// assert_eq!(*log.last().unwrap(), 10);
    /// ```
    pub fn last(&self) -> Option<SRef<T>> {
        if self.len == 0 {
            return None;
        }

        let sector = self.get_current_sector()?;
        let ptr = sector.get_element_ptr(self.cur_sector_last_item_offset - T::SIZE as u64);

        unsafe { Some(SRef::new(ptr)) }
    }

    /// Efficiently returns an immutable reference [SRef] to the first element of this [SLog]
    ///
    /// If the [SLog] is empty, returns [None].
    ///
    /// # Example
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// log.push(10u64).expect("Out of memory");
    ///
    /// assert_eq!(*log.first().unwrap(), 10);
    /// ```
    pub fn first(&self) -> Option<SRef<T>> {
        if self.len == 0 {
            return None;
        }

        let sector = self.get_first_sector()?;
        let ptr = sector.get_element_ptr(0);

        unsafe { Some(SRef::new(ptr)) }
    }

    /// Returns an immutable reference [SRef] to the element at the requested index
    ///
    /// See also [SLog::get_mut].
    ///
    /// The closer the index is to `0`, the worse the performance of this call.
    ///
    /// If `idx` is out of bounds, returns [None]
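    ///
    /// # Example
    /// A minimal sketch, following the same setup as the `push` example:
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// log.push(10u64).expect("Out of memory");
    /// log.push(20u64).expect("Out of memory");
    ///
    /// assert_eq!(*log.get(1).unwrap(), 20);
    /// assert!(log.get(2).is_none());
    /// ```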
    #[inline]
    pub fn get(&self, idx: u64) -> Option<SRef<T>> {
        let (sector, dif) = self.find_sector_for_idx(idx)?;
        let ptr = sector.get_element_ptr((idx - dif) * T::SIZE as u64);

        unsafe { Some(SRef::new(ptr)) }
    }

    /// Returns a mutable reference [SRefMut] to the element at the requested index
    ///
    /// See also [SLog::get].
    ///
    /// The closer the index is to `0`, the worse the performance of this call.
    ///
    /// If `idx` is out of bounds, returns [None]
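    ///
    /// # Example
    /// A minimal sketch, assuming [SRefMut] dereferences mutably to `T` (same setup as the `push`
    /// example):
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// log.push(10u64).expect("Out of memory");
    ///
    /// *log.get_mut(0).unwrap() = 20;
    ///
    /// assert_eq!(*log.get(0).unwrap(), 20);
    /// ```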
    #[inline]
    pub fn get_mut(&mut self, idx: u64) -> Option<SRefMut<T>> {
        let (sector, dif) = self.find_sector_for_idx(idx)?;
        let ptr = sector.get_element_ptr((idx - dif) * T::SIZE as u64);

        unsafe { Some(SRefMut::new(ptr)) }
    }

    /// Returns the length of this [SLog]
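    ///
    /// # Example
    /// A minimal sketch, following the same setup as the `push` example:
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    /// assert_eq!(log.len(), 0);
    ///
    /// log.push(10u64).expect("Out of memory");
    /// assert_eq!(log.len(), 1);
    /// ```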
    #[inline]
    pub fn len(&self) -> u64 {
        self.len
    }

    /// Returns true if the length of this [SLog] is `0`
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Returns a back-to-front iterator over this [SLog]
    ///
    /// This iterator contains elements from last to first.
    ///
    /// # Example
    /// ```rust
    /// # use ic_stable_memory::collections::SLog;
    /// # use ic_stable_memory::stable_memory_init;
    /// # unsafe { ic_stable_memory::mem::clear(); }
    /// # stable_memory_init();
    /// let mut log = SLog::new();
    ///
    /// for i in 0..100 {
    ///     log.push(i).expect("Out of memory");
    /// }
    ///
    /// let mut i = 99;
    /// for elem in log.rev_iter() {
    ///     assert_eq!(*elem, i);
    ///     i -= 1;
    /// }
    /// ```
    #[inline]
    pub fn rev_iter(&self) -> SLogIter<'_, T> {
        SLogIter::new(self)
    }

    fn find_sector_for_idx(&self, idx: u64) -> Option<(Sector<T>, u64)> {
        if idx >= self.len || self.len == 0 {
            return None;
        }

        let mut sector = Sector::<T>::from_ptr(self.cur_sector_ptr);
        let mut sector_len = self.cur_sector_len;

        let mut len = self.len;

        // Walk back from the current (newest) sector; after each subtraction `len` holds the
        // number of elements stored in the sectors before `sector`.
        loop {
            len -= sector_len;
            if len <= idx {
                break;
            }

            sector = Sector::<T>::from_ptr(sector.read_prev_ptr());
            sector_len = sector.read_capacity();
        }

        // `idx` falls into `sector`; the caller uses `len` to compute the in-sector offset.
        Some((sector, len))
    }

    fn get_or_create_current_sector(&mut self) -> Result<Sector<T>, OutOfMemory> {
        if self.cur_sector_ptr == EMPTY_PTR {
            // No sector yet: lazily allocate the first one (capacity doubled from its initial value)
            self.cur_sector_capacity *= 2;

            let it = Sector::<T>::new(self.cur_sector_capacity, EMPTY_PTR)?;

            self.first_sector_ptr = it.as_ptr();
            self.cur_sector_ptr = it.as_ptr();

            Ok(it)
        } else {
            Ok(Sector::<T>::from_ptr(self.cur_sector_ptr))
        }
    }

    #[inline]
    fn get_current_sector(&self) -> Option<Sector<T>> {
        if self.cur_sector_ptr == EMPTY_PTR {
            None
        } else {
            Some(Sector::<T>::from_ptr(self.cur_sector_ptr))
        }
    }

    #[inline]
    fn get_first_sector(&self) -> Option<Sector<T>> {
        if self.first_sector_ptr == EMPTY_PTR {
            None
        } else {
            Some(Sector::<T>::from_ptr(self.first_sector_ptr))
        }
    }

    // Called after `pop`: if the current sector became empty and a previous sector exists,
    // deallocate the current one and make the (full) previous sector current again.
    fn move_to_prev_sector_if_needed(&mut self, sector: Sector<T>) {
        if self.cur_sector_len > 0 {
            return;
        }

        let prev_sector_ptr = sector.read_prev_ptr();
        if prev_sector_ptr == EMPTY_PTR {
            return;
        }

        let cur_sector = Sector::<T>::from_ptr(self.cur_sector_ptr);
        cur_sector.destroy();

        let mut prev_sector = Sector::<T>::from_ptr(prev_sector_ptr);
        prev_sector.write_next_ptr(EMPTY_PTR);

        self.cur_sector_capacity = prev_sector.read_capacity();
        self.cur_sector_len = self.cur_sector_capacity;
        self.cur_sector_ptr = prev_sector_ptr;
        self.cur_sector_last_item_offset = self.cur_sector_capacity * T::SIZE as u64;
    }

    // Called before `push`: if the current sector is full, allocate the next one. Ideally the new
    // sector is twice as big, but if allocation fails its capacity is repeatedly halved; once it
    // drops to `DEFAULT_CAPACITY` or below, the push fails with `OutOfMemory`.
    fn move_to_next_sector_if_needed(&mut self, sector: &mut Sector<T>) -> Result<(), OutOfMemory> {
        if self.cur_sector_len < self.cur_sector_capacity {
            return Ok(());
        }

        let mut next_sector_capacity = self.cur_sector_capacity.checked_mul(2).unwrap();
        let mut new_sector = loop {
            if next_sector_capacity <= DEFAULT_CAPACITY {
                return Err(OutOfMemory);
            }

            match Sector::<T>::new(next_sector_capacity, sector.as_ptr()) {
                Ok(s) => break s,
                Err(_) => {
                    next_sector_capacity /= 2;
                    continue;
                }
            };
        };

        sector.write_next_ptr(new_sector.as_ptr());
        new_sector.write_prev_ptr(sector.as_ptr());

        self.cur_sector_capacity = next_sector_capacity;
        self.cur_sector_ptr = new_sector.as_ptr();
        self.cur_sector_len = 0;
        self.cur_sector_last_item_offset = 0;

        *sector = new_sector;

        Ok(())
    }
}

impl<T: StableType + AsFixedSizeBytes> Default for SLog<T> {
    fn default() -> Self {
        Self::new()
    }
}

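// A `Sector` is a single stable memory allocation laid out as:
// [prev sector ptr: u64][next sector ptr: u64][capacity: u64][elements: capacity * T::SIZE bytes]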
const PREV_OFFSET: u64 = 0;
const NEXT_OFFSET: u64 = PREV_OFFSET + u64::SIZE as u64;
const CAPACITY_OFFSET: u64 = NEXT_OFFSET + u64::SIZE as u64;
const ELEMENTS_OFFSET: u64 = CAPACITY_OFFSET + u64::SIZE as u64;

struct Sector<T>(u64, PhantomData<T>);

impl<T: StableType + AsFixedSizeBytes> Sector<T> {
    fn new(cap: u64, prev: StablePtr) -> Result<Self, OutOfMemory> {
        let slice = unsafe { allocate(u64::SIZE as u64 * 3 + cap * T::SIZE as u64)? };

        let mut it = Self(slice.as_ptr(), PhantomData::default());
        it.write_prev_ptr(prev);
        it.write_next_ptr(EMPTY_PTR);
        it.write_capacity(cap);

        Ok(it)
    }

    fn destroy(self) {
        let slice = unsafe { SSlice::from_ptr(self.0).unwrap() };
        deallocate(slice);
    }

    #[inline]
    fn as_ptr(&self) -> StablePtr {
        self.0
    }

    #[inline]
    fn from_ptr(ptr: u64) -> Self {
        Self(ptr, PhantomData::default())
    }

    #[inline]
    fn read_prev_ptr(&self) -> StablePtr {
        unsafe { crate::mem::read_fixed_for_reference(SSlice::_offset(self.0, PREV_OFFSET)) }
    }

    #[inline]
    fn write_prev_ptr(&mut self, mut ptr: StablePtr) {
        unsafe { crate::mem::write_fixed(SSlice::_offset(self.0, PREV_OFFSET), &mut ptr) }
    }

    #[inline]
    fn read_next_ptr(&self) -> StablePtr {
        unsafe { crate::mem::read_fixed_for_reference(SSlice::_offset(self.0, NEXT_OFFSET)) }
    }

    #[inline]
    fn write_next_ptr(&mut self, mut ptr: StablePtr) {
        unsafe { crate::mem::write_fixed(SSlice::_offset(self.0, NEXT_OFFSET), &mut ptr) }
    }

    #[inline]
    fn read_capacity(&self) -> u64 {
        unsafe { crate::mem::read_fixed_for_reference(SSlice::_offset(self.0, CAPACITY_OFFSET)) }
    }

    #[inline]
    fn write_capacity(&mut self, mut cap: u64) {
        unsafe { crate::mem::write_fixed(SSlice::_offset(self.0, CAPACITY_OFFSET), &mut cap) }
    }

    #[inline]
    fn get_element_ptr(&self, offset: u64) -> u64 {
        SSlice::_offset(self.0, ELEMENTS_OFFSET + offset)
    }

    #[inline]
    fn read_and_disown_element(&self, offset: u64) -> T {
        unsafe { crate::mem::read_fixed_for_move(self.get_element_ptr(offset)) }
    }

    #[inline]
    fn get_element(&self, offset: u64) -> SRef<T> {
        unsafe { SRef::new(self.get_element_ptr(offset)) }
    }

    #[inline]
    fn get_element_mut(&mut self, offset: u64) -> SRefMut<T> {
        unsafe { SRefMut::new(self.get_element_ptr(offset)) }
    }

    #[inline]
    fn write_and_own_element(&self, offset: u64, mut element: T) {
        unsafe { crate::mem::write_fixed(self.get_element_ptr(offset), &mut element) };
    }
}

impl<T: StableType + AsFixedSizeBytes + Debug> SLog<T> {
    /// Prints sectored representation of this [SLog]
    ///
    /// Useful for tests
    pub fn debug_print(&self) {
        let mut sector = if let Some(s) = self.get_first_sector() {
            s
        } else {
            println!("SLog []");
            return;
        };

        let mut current_sector_len = DEFAULT_CAPACITY * 2;

        print!(
            "SLog({}, {}, {}, {}, {}, {})",
            self.len,
            self.first_sector_ptr,
            self.cur_sector_ptr,
            self.cur_sector_len,
            self.cur_sector_capacity,
            self.cur_sector_last_item_offset
        );

        print!(" [");

        loop {
            print!("[");
            let len = if sector.as_ptr() == self.cur_sector_ptr {
                self.cur_sector_len
            } else {
                current_sector_len
            };

            let mut offset = 0;
            for i in 0..len {
                let elem = sector.get_element(offset);
                offset += T::SIZE as u64;

                print!("{:?}", *elem);
                if i < len - 1 {
                    print!(", ");
                }
            }
            print!("]");

            if sector.as_ptr() == self.cur_sector_ptr {
                break;
            }

            print!(", ");

            let next_sector_ptr = sector.read_next_ptr();
            assert_ne!(next_sector_ptr, EMPTY_PTR);

            sector = Sector::<T>::from_ptr(next_sector_ptr);
            current_sector_len *= 2;
        }

        println!("]");
    }
}

impl<T: StableType + AsFixedSizeBytes> AsFixedSizeBytes for SLog<T> {
    const SIZE: usize = u64::SIZE * 6 + usize::SIZE;
    type Buf = [u8; u64::SIZE * 6 + usize::SIZE];

    fn as_fixed_size_bytes(&self, buf: &mut [u8]) {
        self.len.as_fixed_size_bytes(&mut buf[0..u64::SIZE]);
        self.first_sector_ptr
            .as_fixed_size_bytes(&mut buf[u64::SIZE..(u64::SIZE * 2)]);
        self.cur_sector_ptr
            .as_fixed_size_bytes(&mut buf[(u64::SIZE * 2)..(u64::SIZE * 3)]);
        self.cur_sector_last_item_offset
            .as_fixed_size_bytes(&mut buf[(u64::SIZE * 3)..(u64::SIZE * 4)]);
        self.cur_sector_capacity
            .as_fixed_size_bytes(&mut buf[(u64::SIZE * 4)..(u64::SIZE * 5)]);
        self.cur_sector_len
            .as_fixed_size_bytes(&mut buf[(u64::SIZE * 5)..(u64::SIZE * 6)]);
    }

    fn from_fixed_size_bytes(buf: &[u8]) -> Self {
        let len = u64::from_fixed_size_bytes(&buf[0..u64::SIZE]);
        let first_sector_ptr = u64::from_fixed_size_bytes(&buf[u64::SIZE..(u64::SIZE * 2)]);
        let cur_sector_ptr = u64::from_fixed_size_bytes(&buf[(u64::SIZE * 2)..(u64::SIZE * 3)]);
        let cur_sector_last_item_offset =
            u64::from_fixed_size_bytes(&buf[(u64::SIZE * 3)..(u64::SIZE * 4)]);
        let cur_sector_capacity =
            u64::from_fixed_size_bytes(&buf[(u64::SIZE * 4)..(u64::SIZE * 5)]);
        let cur_sector_len = u64::from_fixed_size_bytes(&buf[(u64::SIZE * 5)..(u64::SIZE * 6)]);

        Self {
            len,
            first_sector_ptr,
            cur_sector_ptr,
            cur_sector_len,
            cur_sector_capacity,
            cur_sector_last_item_offset,
            stable_drop_flag: false,
            _marker: PhantomData::default(),
        }
    }
}

impl<T: StableType + AsFixedSizeBytes> StableType for SLog<T> {
    #[inline]
    unsafe fn stable_drop_flag_off(&mut self) {
        self.stable_drop_flag = false;
    }

    #[inline]
    unsafe fn stable_drop_flag_on(&mut self) {
        self.stable_drop_flag = true;
    }

    #[inline]
    fn should_stable_drop(&self) -> bool {
        self.stable_drop_flag
    }

    #[inline]
    unsafe fn stable_drop(&mut self) {
        self.clear();

        if self.cur_sector_ptr != EMPTY_PTR {
            let sector = Sector::<T>::from_ptr(self.cur_sector_ptr);
            sector.destroy();
        }
    }
}

impl<T: StableType + AsFixedSizeBytes> Drop for SLog<T> {
    fn drop(&mut self) {
        if self.should_stable_drop() {
            unsafe {
                self.stable_drop();
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::collections::log::SLog;
    use crate::utils::test::generate_random_string;
    use crate::{
        _debug_validate_allocator, get_allocated_size, init_allocator, retrieve_custom_data,
        stable, stable_memory_init, stable_memory_post_upgrade, stable_memory_pre_upgrade,
        store_custom_data, SBox,
    };
    use rand::rngs::ThreadRng;
    use rand::{thread_rng, Rng};

    #[test]
    fn works_fine() {
        stable::clear();
        stable_memory_init();

        {
            let mut log = SLog::new();

            assert!(log.is_empty());

            println!();
            println!("PUSHES");

            for i in 0..100 {
                log.debug_print();

                log.push(i);

                for j in 0..i {
                    assert_eq!(*log.get(j).unwrap(), j);
                }
            }

            log.debug_print();

            assert_eq!(log.len(), 100);
            for i in 0..100 {
                assert_eq!(*log.get(i).unwrap(), i);
            }

            println!();
            println!("POPS");

            for i in (20..100).rev() {
                assert_eq!(log.pop().unwrap(), i);
                log.debug_print();
            }

            println!();
            println!("PUSHES again");

            assert_eq!(log.len(), 20);
            for i in 20..100 {
                log.push(i);
                log.debug_print();
            }

            for i in 0..100 {
                assert_eq!(*log.get(i).unwrap(), i);
            }

            println!();
            println!("POPS again");

            for i in (0..100).rev() {
                assert_eq!(log.pop().unwrap(), i);
                log.debug_print();
            }

            assert!(log.pop().is_none());
            assert!(log.is_empty());
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn iter_works_fine() {
        stable::clear();
        stable_memory_init();

        {
            let mut log = SLog::new();

            for i in 0..100 {
                log.push(i);
            }

            let mut j = 99;

            log.debug_print();

            for mut i in log.rev_iter() {
                assert_eq!(*i, j);
                j -= 1;
            }

            log.debug_print();
        }

        _debug_validate_allocator();
        assert_eq!(get_allocated_size(), 0);
    }

    enum Action {
        Push,
        Pop,
        Clear,
        CanisterUpgrade,
    }

    struct Fuzzer {
        state: Option<SLog<SBox<String>>>,
        example: Vec<String>,
        rng: ThreadRng,
        log: Vec<Action>,
    }

    impl Fuzzer {
        fn new() -> Self {
            Self {
                state: Some(SLog::default()),
                example: Vec::default(),
                rng: thread_rng(),
                log: Vec::default(),
            }
        }

        fn it(&mut self) -> &mut SLog<SBox<String>> {
            self.state.as_mut().unwrap()
        }

        fn next(&mut self) {
            let action = self.rng.gen_range(0..101);

            match action {
                // PUSH ~60%
                0..=60 => {
                    let str = generate_random_string(&mut self.rng);

                    if let Ok(data) = SBox::new(str.clone()) {
                        // Only record the push in the model `example` if it actually succeeded,
                        // so both stay in sync under memory pressure
                        if self.it().push(data).is_ok() {
                            self.example.push(str);

                            self.log.push(Action::Push);
                        }
                    }
                }
                // POP ~30%
                61..=90 => {
                    self.it().pop();
                    self.example.pop();

                    self.log.push(Action::Pop);
                }
                // CLEAR
                91..=92 => {
                    self.it().clear();
                    self.example.clear();

                    self.log.push(Action::Clear);
                }
                // CANISTER UPGRADE ~10%
                _ => match SBox::new(self.state.take().unwrap()) {
                    Ok(data) => {
                        store_custom_data(1, data);

                        if stable_memory_pre_upgrade().is_ok() {
                            stable_memory_post_upgrade();
                        }

                        self.state =
                            retrieve_custom_data::<SLog<SBox<String>>>(1).map(|it| it.into_inner());

                        self.log.push(Action::CanisterUpgrade);
                    }
                    Err(log) => {
                        self.state = Some(log);
                    }
                },
            }

            _debug_validate_allocator();
            assert_eq!(self.it().len(), self.example.len() as u64);

            for i in 0..self.it().len() {
                assert_eq!(
                    self.it().get(i).unwrap().clone(),
                    self.example.get(i as usize).unwrap().clone()
                );
            }
        }
    }

    #[test]
    fn fuzzer_works_fine() {
        stable::clear();
        init_allocator(0);

        {
            let mut fuzzer = Fuzzer::new();

            for _ in 0..10_000 {
                fuzzer.next();
            }
        }

        assert_eq!(get_allocated_size(), 0);
    }

    #[test]
    fn fuzzer_works_fine_limited_memory() {
        stable::clear();
        init_allocator(10);

        {
            let mut fuzzer = Fuzzer::new();

            for _ in 0..10_000 {
                fuzzer.next();
            }
        }

        assert_eq!(get_allocated_size(), 0);
    }
}