ic_stable_memory/utils/mem_context.rs

//! This module defines an isomorphic wrapper around the raw stable memory API from the `ic_cdk` crate.
//!
//! When compiled to wasm, each function simply inlines into a call to the corresponding function of the raw API.
//! For example, [MemContext::size_pages()] on wasm compiles down to [ic_cdk::api::stable::stable64_size()].
//!
//! When compiled for any other target, a stable memory emulation is enabled instead, which lets all of the
//! APIs keep working even inside `cargo test` by allocating "stable" memory on the heap. This emulation is
//! reasonably accurate in terms of performance: if your algorithm is 4 times slower against emulated stable
//! memory than against the heap, it is likely to be roughly 4 times more expensive inside a real canister's
//! stable memory than in its heap.
//!
//! This makes it possible to write full-scale tests that use stable memory as their main memory.
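//!
//! # Example
//!
//! A minimal usage sketch (assuming this module's `stable` facade is reachable at
//! `ic_stable_memory::utils::mem_context::stable`); on a non-wasm target it runs entirely
//! against the heap-backed emulation:
//!
//! ```ignore
//! use ic_stable_memory::utils::mem_context::stable;
//!
//! // reserve one 64K page of (emulated) stable memory
//! stable::grow(1).expect("out of stable memory");
//!
//! // write a few bytes at offset 0 and read them back
//! stable::write(0, &[1, 2, 3, 4]);
//! let mut buf = [0u8; 4];
//! stable::read(0, &mut buf);
//! assert_eq!(buf, [1, 2, 3, 4]);
//! ```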

use std::cmp::min;

/// Each wasm memory page is 64K in size
pub const PAGE_SIZE_BYTES: u64 = 64 * 1024;

/// Indicates that the canister is out of stable memory at this moment.
#[derive(Debug, Copy, Clone)]
pub struct OutOfMemory;

pub(crate) trait MemContext {
    /// Returns the current size of the memory in 64K pages.
    fn size_pages(&self) -> u64;
    /// Grows the memory by `new_pages` pages, returning the previous size in pages.
    fn grow(&mut self, new_pages: u64) -> Result<u64, OutOfMemory>;
    /// Reads `buf.len()` bytes starting at `offset` into `buf`.
    fn read(&self, offset: u64, buf: &mut [u8]);
    /// Writes the whole `buf` to memory starting at `offset`.
    fn write(&mut self, offset: u64, buf: &[u8]);
}

#[derive(Clone)]
pub(crate) struct StableMemContext;

#[cfg(target_family = "wasm")]
use ic_cdk::api::stable::{stable64_grow, stable64_read, stable64_size, stable64_write};

#[cfg(target_family = "wasm")]
impl MemContext for StableMemContext {
    #[inline]
    fn size_pages(&self) -> u64 {
        stable64_size()
    }

    #[inline]
    fn grow(&mut self, new_pages: u64) -> Result<u64, OutOfMemory> {
        stable64_grow(new_pages).map_err(|_| OutOfMemory)
    }

    #[inline]
    fn read(&self, offset: u64, buf: &mut [u8]) {
        stable64_read(offset, buf)
    }

    #[inline]
    fn write(&mut self, offset: u64, buf: &[u8]) {
        stable64_write(offset, buf)
    }
}

#[derive(Clone)]
pub(crate) struct TestMemContext {
    pub pages: Vec<[u8; PAGE_SIZE_BYTES as usize]>,
}

impl TestMemContext {
    const fn default() -> Self {
        Self { pages: Vec::new() }
    }
}

impl MemContext for TestMemContext {
    #[inline]
    fn size_pages(&self) -> u64 {
        self.pages.len() as u64
    }

    fn grow(&mut self, new_pages: u64) -> Result<u64, OutOfMemory> {
        let prev_pages = self.size_pages();

        // the emulation never reports OutOfMemory - it simply allocates more pages on the heap
        for _ in 0..new_pages {
            self.pages.push([0u8; PAGE_SIZE_BYTES as usize]);
        }

        Ok(prev_pages)
    }

    fn read(&self, offset: u64, buf: &mut [u8]) {
        // split the requested range into three segments: the tail of the first touched page,
        // any number of fully covered pages in between, and the head of the last touched page
        let start_page_idx = (offset / PAGE_SIZE_BYTES) as usize;
        let start_page_inner_idx = (offset % PAGE_SIZE_BYTES) as usize;
        let start_page_size = min(PAGE_SIZE_BYTES as usize - start_page_inner_idx, buf.len());

        let (pages_in_between, last_page_size) = if start_page_size == buf.len() {
            (0usize, 0usize)
        } else {
            (
                (buf.len() - start_page_size) / PAGE_SIZE_BYTES as usize,
                (buf.len() - start_page_size) % PAGE_SIZE_BYTES as usize,
            )
        };

        // read first page
        buf[0..start_page_size].copy_from_slice(
            &self.pages[start_page_idx]
                [start_page_inner_idx..(start_page_inner_idx + start_page_size)],
        );

        // read pages in-between
        for i in 0..pages_in_between {
            buf[(start_page_size + i * PAGE_SIZE_BYTES as usize)
                ..(start_page_size + (i + 1) * PAGE_SIZE_BYTES as usize)]
                .copy_from_slice(&self.pages[start_page_idx + i + 1]);
        }

        // read last page
        if last_page_size == 0 {
            return;
        }

        buf[(start_page_size + pages_in_between * PAGE_SIZE_BYTES as usize)
            ..(start_page_size + pages_in_between * PAGE_SIZE_BYTES as usize + last_page_size)]
            .copy_from_slice(&self.pages[start_page_idx + pages_in_between + 1][0..last_page_size]);
    }

    fn write(&mut self, offset: u64, buf: &[u8]) {
        // same three-segment split as in `read` above
        let start_page_idx = (offset / PAGE_SIZE_BYTES) as usize;
        let start_page_inner_idx = (offset % PAGE_SIZE_BYTES) as usize;
        let start_page_size = min(PAGE_SIZE_BYTES as usize - start_page_inner_idx, buf.len());

        let (pages_in_between, last_page_size) = if start_page_size == buf.len() {
            (0usize, 0usize)
        } else {
            (
                (buf.len() - start_page_size) / PAGE_SIZE_BYTES as usize,
                (buf.len() - start_page_size) % PAGE_SIZE_BYTES as usize,
            )
        };

        // write to first page
        self.pages[start_page_idx][start_page_inner_idx..(start_page_inner_idx + start_page_size)]
            .copy_from_slice(&buf[0..start_page_size]);

        // write to pages in-between
        for i in 0..pages_in_between {
            self.pages[start_page_idx + i + 1].copy_from_slice(
                &buf[(start_page_size + i * PAGE_SIZE_BYTES as usize)
                    ..(start_page_size + (i + 1) * PAGE_SIZE_BYTES as usize)],
            );
        }

        // write to last page
        if last_page_size == 0 {
            return;
        }

        self.pages[start_page_idx + pages_in_between + 1][0..last_page_size].copy_from_slice(
            &buf[(start_page_size + pages_in_between * PAGE_SIZE_BYTES as usize)
                ..(start_page_size + pages_in_between * PAGE_SIZE_BYTES as usize + last_page_size)],
        );
    }
}

#[cfg(target_family = "wasm")]
pub mod stable {
    use crate::utils::mem_context::{MemContext, OutOfMemory, StableMemContext};

    #[inline]
    pub fn size_pages() -> u64 {
        MemContext::size_pages(&StableMemContext)
    }

    #[inline]
    pub fn grow(new_pages: u64) -> Result<u64, OutOfMemory> {
        MemContext::grow(&mut StableMemContext, new_pages)
    }

    #[inline]
    pub fn read(offset: u64, buf: &mut [u8]) {
        MemContext::read(&StableMemContext, offset, buf)
    }

    #[inline]
    pub fn write(offset: u64, buf: &[u8]) {
        MemContext::write(&mut StableMemContext, offset, buf)
    }
}

#[cfg(not(target_family = "wasm"))]
pub mod stable {
    use crate::utils::mem_context::{MemContext, OutOfMemory, TestMemContext};
    use std::cell::RefCell;

    thread_local! {
        static CONTEXT: RefCell<TestMemContext> = RefCell::new(TestMemContext::default());
    }

    /// Drops all emulated stable memory; useful for isolating test cases from each other.
    #[inline]
    pub fn clear() {
        CONTEXT.with(|it| it.borrow_mut().pages.clear())
    }

    #[inline]
    pub fn size_pages() -> u64 {
        CONTEXT.with(|it| it.borrow().size_pages())
    }

    #[inline]
    pub fn grow(new_pages: u64) -> Result<u64, OutOfMemory> {
        CONTEXT.with(|it| it.borrow_mut().grow(new_pages))
    }

    #[inline]
    pub fn read(offset: u64, buf: &mut [u8]) {
        CONTEXT.with(|it| it.borrow().read(offset, buf))
    }

    #[inline]
    pub fn write(offset: u64, buf: &[u8]) {
        CONTEXT.with(|it| it.borrow_mut().write(offset, buf))
    }
}

#[cfg(test)]
mod tests {
    use crate::{stable, PAGE_SIZE_BYTES};
    use rand::{thread_rng, Rng};

    #[test]
    fn random_works_fine() {
        for _ in 0..100 {
            stable::clear();
            stable::grow(1000).unwrap();

            let mut rng = thread_rng();
            let iterations = 500usize;
            let size_range = 0..(u16::MAX as usize * 2);

            let mut sizes = Vec::new();
            let mut cur_ptr = 0;
            for i in 0..iterations {
                let size = rng.gen_range(size_range.clone());
                let buf = vec![(i % 256) as u8; size];

                stable::write(cur_ptr, &buf);

                sizes.push(size);
                cur_ptr += size as u64;

                // check that every previously written buffer is still intact
                let mut c_ptr = 0u64;
                for j in 0..i {
                    let size = sizes[j];
                    let mut buf = vec![0u8; size];

                    stable::read(c_ptr, &mut buf);

                    assert_eq!(buf, vec![(j % 256) as u8; size]);

                    c_ptr += size as u64;
                }
            }
        }
    }

    #[test]
    fn big_reads_writes_work_fine() {
        stable::clear();
        stable::grow(10).unwrap();

        let buf = [10u8; PAGE_SIZE_BYTES as usize * 10];
        stable::write(0, &buf);

        let mut buf1 = [0u8; PAGE_SIZE_BYTES as usize * 10 - 50];
        stable::read(25, &mut buf1);

        assert_eq!(buf[25..PAGE_SIZE_BYTES as usize * 10 - 25], buf1);
    }
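
    // A small additional sketch (not part of the original suite): a single write/read pair
    // that straddles one page boundary, exercising the first-page/last-page split in
    // `TestMemContext`.
    #[test]
    fn page_boundary_read_write_works_fine() {
        stable::clear();
        stable::grow(2).unwrap();

        // 100 bytes, starting 50 bytes before the end of the first page
        let offset = PAGE_SIZE_BYTES - 50;
        let buf = [42u8; 100];
        stable::write(offset, &buf);

        let mut buf1 = [0u8; 100];
        stable::read(offset, &mut buf1);

        assert_eq!(buf, buf1);
    }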
}