wasm_dbms_memory/memory_manager.rs

// Rust guideline compliant 2026-03-01
// X-WHERE-CLAUSE, M-CANONICAL-DOCS

//! Memory manager for page-level memory operations.

use wasm_dbms_api::prelude::{
    DataSize, Encode, MSize, MemoryError, MemoryResult, Page, PageOffset,
};

use crate::memory_access::MemoryAccess;
use crate::provider::MemoryProvider;

/// Schema page (reserved page 0).
pub const SCHEMA_PAGE: Page = 0;
/// ACL page (reserved page 1).
pub const ACL_PAGE: Page = 1;

/// The memory manager handles page-level memory operations on top of a
/// [`MemoryProvider`].
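///
/// # Examples
///
/// A minimal round-trip sketch (`ignore`d, since it assumes the
/// `HeapMemoryProvider` used by the tests below):
///
/// ```ignore
/// let mut mm = MemoryManager::init(HeapMemoryProvider::default());
/// let page = mm.allocate_page()?;
/// mm.write_at_raw(page, 0, &[1, 2, 3])?;
/// ```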
pub struct MemoryManager<P>
where
    P: MemoryProvider,
{
    provider: P,
}

impl<P> MemoryManager<P>
where
    P: MemoryProvider,
{
    /// Initializes the memory manager and allocates the reserved schema and
    /// ACL pages.
    ///
    /// # Panics
    ///
    /// Panics if the memory provider fails to grow to the two reserved pages.
    pub fn init(provider: P) -> Self {
        let mut manager = MemoryManager { provider };

        // Nothing to do if the two reserved pages are already allocated.
        if manager.provider.pages() >= 2 {
            return manager;
        }

        // Grow by two pages for the reserved schema (page 0) and ACL (page 1) pages.
        if let Err(err) = manager.provider.grow(2) {
            panic!("Failed to grow memory during initialization: {err}");
        }

        manager
    }

    /// Returns the ACL page number.
    pub const fn acl_page(&self) -> Page {
        ACL_PAGE
    }

    /// Returns the schema page number.
    pub const fn schema_page(&self) -> Page {
        SCHEMA_PAGE
    }

    /// Writes raw bytes at the specified page and offset, bypassing the
    /// alignment, encoding, and page-allocation checks.
    ///
    /// This is used by the journal rollback path, which already holds
    /// pre-captured byte buffers.
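    ///
    /// # Examples
    ///
    /// A rollback-style sketch (`ignore`d; `before_image`, `page`, and
    /// `offset` are hypothetical values captured earlier by the journal):
    ///
    /// ```ignore
    /// let before_image: Vec<u8> = vec![0xAA; 16]; // bytes captured before the write
    /// mm.write_at_raw(page, offset, &before_image)?;
    /// ```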
    pub fn write_at_raw(&mut self, page: Page, offset: PageOffset, buf: &[u8]) -> MemoryResult<()> {
        let absolute_offset = self.absolute_offset(page, offset);
        self.provider.write(absolute_offset, buf)
    }

    /// Gets the last allocated page number.
    fn last_page(&self) -> Option<Page> {
        match self.provider.pages() {
            0 => None,
            n => Some(n as Page - 1),
        }
    }

    /// Calculates the absolute offset in memory, i.e.
    /// `page * P::PAGE_SIZE + offset`, given a page number and an offset
    /// within that page.
    ///
    /// # Panics
    ///
    /// Panics if the calculation overflows `u64`.
    fn absolute_offset(&self, page: Page, offset: PageOffset) -> u64 {
        (page as u64)
            .checked_mul(P::PAGE_SIZE)
            .and_then(|page_offset| page_offset.checked_add(offset as u64))
            .expect("Overflow when calculating absolute offset")
    }

    /// Returns a [`MemoryError::SegmentationFault`] if the specified page is
    /// not allocated.
    fn check_unallocated_page(
        &self,
        page: Page,
        offset: PageOffset,
        data_size: MSize,
    ) -> MemoryResult<()> {
        if self.last_page().is_none_or(|last_page| page > last_page) {
            return Err(MemoryError::SegmentationFault {
                page,
                offset,
                data_size: data_size as u64,
                page_size: P::PAGE_SIZE,
            });
        }
        Ok(())
    }

    /// Checks if the given offset is aligned according to the alignment
    /// requirement of type `E`.
    fn check_alignment<E>(&self, offset: PageOffset) -> MemoryResult<()>
    where
        E: Encode,
    {
        let alignment = E::ALIGNMENT as PageOffset;
        if alignment != 0 && !offset.is_multiple_of(alignment) {
            return Err(MemoryError::OffsetNotAligned { offset, alignment });
        }
        Ok(())
    }
}

impl<P> MemoryAccess for MemoryManager<P>
where
    P: MemoryProvider,
{
    fn page_size(&self) -> u64 {
        P::PAGE_SIZE
    }

    fn allocate_page(&mut self) -> MemoryResult<Page> {
        self.provider.grow(1)?;

        // Resolve the new page number first so a failed grow can never cause
        // the zeroing write below to land on page 0.
        let page = self.last_page().ok_or(MemoryError::FailedToAllocatePage)?;

        // Zero the newly allocated page.
        self.provider.write(
            self.absolute_offset(page, 0),
            &vec![0u8; P::PAGE_SIZE as usize],
        )?;

        Ok(page)
    }

    fn read_at<D>(&self, page: Page, offset: PageOffset) -> MemoryResult<D>
    where
        D: Encode,
    {
        self.check_alignment::<D>(offset)?;

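        // For `DataSize::Dynamic` values the encoded length is unknown up
        // front, so read everything from `offset` to the end of the page and
        // let `decode` determine the actual length.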
        let mut buf = vec![
            0u8;
            match D::SIZE {
                DataSize::Fixed(size) => size as usize,
                DataSize::Dynamic => (P::PAGE_SIZE as usize).saturating_sub(offset as usize),
            }
        ];

        self.read_at_raw(page, offset, &mut buf)?;

        D::decode(std::borrow::Cow::Owned(buf))
    }

    fn write_at<E>(&mut self, page: Page, offset: PageOffset, data: &E) -> MemoryResult<()>
    where
        E: Encode,
    {
        self.check_unallocated_page(page, offset, data.size())?;
        self.check_alignment::<E>(offset)?;

        let encoded = data.encode();

        if offset as u64 + encoded.len() as u64 > P::PAGE_SIZE {
            return Err(MemoryError::SegmentationFault {
                page,
                offset,
                data_size: encoded.len() as u64,
                page_size: P::PAGE_SIZE,
            });
        }

        let absolute_offset = self.absolute_offset(page, offset);
        self.provider.write(absolute_offset, encoded.as_ref())?;

        // Zero padding bytes if any.
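        // Zeroing the padding keeps the bytes between the encoded value and
        // the next aligned offset deterministic; `test_write_should_zero_padding`
        // below relies on this when a shorter value overwrites a longer one.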
        let padding = align_up::<E>(encoded.len()) - encoded.len();
        if padding > 0 {
            let padding_offset = absolute_offset + encoded.len() as u64;
            let padding_buffer = vec![0u8; padding];
            self.provider
                .write(padding_offset, padding_buffer.as_ref())?;
        }

        Ok(())
    }

    fn zero<E>(&mut self, page: Page, offset: PageOffset, data: &E) -> MemoryResult<()>
    where
        E: Encode,
    {
        self.check_unallocated_page(page, offset, data.size())?;
        self.check_alignment::<E>(offset)?;

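        // Zero the full aligned footprint, not just `data.size()`, so the
        // padding written by `write_at` is cleared as well.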
        let length = align_up::<E>(data.size() as usize);

        if offset as u64 + (length as u64) > P::PAGE_SIZE {
            return Err(MemoryError::SegmentationFault {
                page,
                offset,
                data_size: data.size() as u64,
                page_size: P::PAGE_SIZE,
            });
        }

        let absolute_offset = self.absolute_offset(page, offset);
        let buffer = vec![0u8; length];
        self.provider.write(absolute_offset, buffer.as_ref())
    }

    fn read_at_raw(&self, page: Page, offset: PageOffset, buf: &mut [u8]) -> MemoryResult<usize> {
        if self.last_page().is_none_or(|last_page| page > last_page) {
            return Err(MemoryError::SegmentationFault {
                page,
                offset,
                data_size: buf.len() as u64,
                page_size: P::PAGE_SIZE,
            });
        }

        // Clamp the read to the end of the page; `saturating_sub` guards
        // against an offset past the page boundary.
        let read_len = (P::PAGE_SIZE.saturating_sub(offset as u64) as usize).min(buf.len());

        let absolute_offset = self.absolute_offset(page, offset);
        self.provider
            .read(absolute_offset, buf[..read_len].as_mut())?;

        Ok(read_len)
    }
}

/// Rounds `offset` up to the next multiple of [`E::ALIGNMENT`]; an alignment
/// of `0` is treated as "no alignment requirement", mirroring
/// `check_alignment`.
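///
/// # Examples
///
/// A sketch (`ignore`d) using the `DataWithAlignment` test type below, whose
/// `DEFAULT_ALIGNMENT` behaves as 32 in `test_should_compute_padding`:
///
/// ```ignore
/// assert_eq!(align_up::<DataWithAlignment>(1), 32);
/// assert_eq!(align_up::<DataWithAlignment>(48), 64);
/// ```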
#[inline]
pub const fn align_up<E>(offset: usize) -> usize
where
    E: Encode,
{
    let alignment = E::ALIGNMENT as usize;
    if alignment == 0 {
        // A zero alignment means "no alignment requirement"; avoid the
        // division by zero in `div_ceil`.
        return offset;
    }
    offset.div_ceil(alignment) * alignment
}

#[cfg(test)]
mod tests {
    use std::borrow::Cow;

    use wasm_dbms_api::prelude::{
        DEFAULT_ALIGNMENT, DataSize, MSize, MemoryError, MemoryResult, PageOffset, Text,
    };

    use super::*;
    use crate::provider::HeapMemoryProvider;

    fn make_mm() -> MemoryManager<HeapMemoryProvider> {
        MemoryManager::init(HeapMemoryProvider::default())
    }

    #[test]
    fn test_should_init_memory_manager() {
        let mm = make_mm();
        assert_eq!(mm.last_page(), Some(1));
    }

    #[test]
    fn test_should_get_last_page() {
        let mm = make_mm();
        let last_page = mm.last_page();
        assert_eq!(last_page, Some(1)); // schema and ACL pages
    }

    #[test]
    fn test_should_get_memory_page_size() {
        let mm = make_mm();
        let page_size = mm.page_size();
        assert_eq!(page_size, HeapMemoryProvider::PAGE_SIZE);
    }

    #[test]
    fn test_should_write_and_read_fixed_data_size() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 42, b: 1337 };
        mm.write_at(ACL_PAGE, 0, &data_to_write)
            .expect("Failed to write data to ACL page");

        let out: FixedSizeData = mm
            .read_at(ACL_PAGE, 0)
            .expect("Failed to read data from ACL page");

        assert_eq!(out, data_to_write);
    }

    #[test]
    fn test_write_should_zero_padding() {
        let mut mm = make_mm();
        let data_to_write = Text("very_long_string".to_string());
        mm.write_at(ACL_PAGE, 0, &data_to_write)
            .expect("Failed to write data to ACL page");

        let mut buffer = vec![0; 32];
        mm.read_at_raw(ACL_PAGE, 0, &mut buffer)
            .expect("Failed to read data from ACL page");

        let non_zero_count = buffer.iter().filter(|&&b| b != 0).count();
        assert_eq!(non_zero_count, data_to_write.size() as usize - 1);

        let data_to_write_short = Text("short".to_string());
        mm.write_at(ACL_PAGE, 0, &data_to_write_short)
            .expect("Failed to write data to ACL page");

        let mut buffer = vec![0; 32];
        mm.read_at_raw(ACL_PAGE, 0, &mut buffer)
            .expect("Failed to read data from ACL page");

        let non_zero_count = buffer.iter().filter(|&&b| b != 0).count();
        assert_eq!(non_zero_count, data_to_write_short.size() as usize - 1);
    }

    #[test]
    fn test_should_zero_data() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        mm.write_at(ACL_PAGE, 48, &data_to_write)
            .expect("Failed to write data to ACL page");

        mm.zero(ACL_PAGE, 48, &data_to_write)
            .expect("Failed to zero data on ACL page");

        let mut buffer = vec![0; 50];
        mm.read_at_raw(ACL_PAGE, 48, &mut buffer)
            .expect("Failed to read data from ACL page");

        assert!(buffer.iter().all(|&b| b == 0));
    }

    #[test]
    fn test_should_zero_with_alignment() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        mm.write_at(ACL_PAGE, 0, &data_to_write)
            .expect("Failed to write data to ACL page");
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        mm.write_at(ACL_PAGE, 6, &data_to_write)
            .expect("Failed to write data to ACL page");

        let data_with_alignment = DataWithAlignment { a: 100, b: 200 };
        mm.zero(ACL_PAGE, 0, &data_with_alignment)
            .expect("Failed to zero data on ACL page");

        let mut buffer = vec![0; 32];
        mm.read_at_raw(ACL_PAGE, 0, &mut buffer)
            .expect("Failed to read data from ACL page");
        assert!(
            buffer.iter().all(|&b| b == 0),
            "First 32 bytes are not zeroed"
        );
    }

    #[test]
    fn test_should_check_whether_write_is_aligned() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        let res = mm.write_at(ACL_PAGE, 2, &data_to_write);
        assert!(matches!(res, Err(MemoryError::OffsetNotAligned { .. })));
    }

    #[test]
    fn test_should_check_whether_read_is_aligned() {
        let mm = make_mm();
        let result: MemoryResult<FixedSizeData> = mm.read_at(ACL_PAGE, 3);
        assert!(matches!(result, Err(MemoryError::OffsetNotAligned { .. })));
    }

    #[test]
    fn test_should_check_whether_zero_is_aligned() {
        let mut mm = make_mm();
        let data_to_zero = FixedSizeData { a: 1, b: 2 };
        let result = mm.zero(ACL_PAGE, 5, &data_to_zero);
        assert!(matches!(result, Err(MemoryError::OffsetNotAligned { .. })));
    }

    #[test]
    fn test_should_not_zero_unallocated_page() {
        let mut mm = make_mm();
        let data_to_zero = FixedSizeData { a: 1, b: 2 };
        let result = mm.zero(10, 0, &data_to_zero);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
    }

    #[test]
    fn test_should_not_zero_out_of_bounds() {
        let mut mm = make_mm();
        let data_to_zero = FixedSizeData { a: 1, b: 2 };
        let result = mm.zero(
            ACL_PAGE,
            (HeapMemoryProvider::PAGE_SIZE - 4) as PageOffset,
            &data_to_zero,
        );
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
    }

    #[test]
    fn test_should_read_raw() {
        let mut mm = make_mm();
        let data_to_write = vec![1u8, 2, 3, 4, 5];
        mm.write_at_raw(ACL_PAGE, 20, &data_to_write)
            .expect("Failed to write raw data to ACL page");

        let mut buf = vec![0u8; 5];
        mm.read_at_raw(ACL_PAGE, 20, &mut buf)
            .expect("Failed to read raw data from ACL page");

        assert_eq!(buf, data_to_write);
    }

    #[test]
    fn test_should_fail_out_of_bounds_access() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 1, b: 2 };
        let result = mm.write_at(
            ACL_PAGE,
            (HeapMemoryProvider::PAGE_SIZE - 4) as PageOffset,
            &data_to_write,
        );
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));

        let result: MemoryResult<FixedSizeData> = mm.read_at(10, 0);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
        let result = mm.write_at(10, 0, &data_to_write);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
    }

    #[test]
    fn test_should_allocate_new_page() {
        let mut mm = make_mm();
        let initial_last_page = mm.last_page().unwrap();
        let new_page = mm.allocate_page().expect("Failed to allocate new page");
        assert_eq!(new_page, initial_last_page + 1);
        let updated_last_page = mm.last_page().unwrap();
        assert_eq!(updated_last_page, new_page);
    }

    #[test]
    fn test_should_check_unallocated_page() {
        let mm = make_mm();
        let result = mm.check_unallocated_page(100, 0, 10);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));

        let last_page = mm.last_page().unwrap();
        let result = mm.check_unallocated_page(last_page, 0, 10);
        assert!(result.is_ok());
    }

    #[test]
    fn test_should_compute_padding() {
        assert_eq!(align_up::<DataWithAlignment>(0), 0);
        assert_eq!(align_up::<DataWithAlignment>(1), 32);
        assert_eq!(align_up::<DataWithAlignment>(2), 32);
        assert_eq!(align_up::<DataWithAlignment>(3), 32);
        assert_eq!(align_up::<DataWithAlignment>(31), 32);
        assert_eq!(align_up::<DataWithAlignment>(32), 32);
        assert_eq!(align_up::<DataWithAlignment>(48), 64);
        assert_eq!(align_up::<DataWithAlignment>(147), 160);
    }
472
473    #[derive(Debug, Clone, PartialEq)]
474    struct FixedSizeData {
475        a: u16,
476        b: u32,
477    }
478
479    impl Encode for FixedSizeData {
480        const SIZE: DataSize = DataSize::Fixed(6);
481        const ALIGNMENT: PageOffset = 6;
482
483        fn encode(&'_ self) -> Cow<'_, [u8]> {
484            let mut buf = vec![0u8; self.size() as usize];
485            buf[0..2].copy_from_slice(&self.a.to_le_bytes());
486            buf[2..6].copy_from_slice(&self.b.to_le_bytes());
487            Cow::Owned(buf)
488        }
489
490        fn decode(data: Cow<[u8]>) -> MemoryResult<Self>
491        where
492            Self: Sized,
493        {
494            let a = u16::from_le_bytes([data[0], data[1]]);
495            let b = u32::from_le_bytes([data[2], data[3], data[4], data[5]]);
496            Ok(FixedSizeData { a, b })
497        }
498
499        fn size(&self) -> MSize {
500            6
501        }
502    }
503
504    #[derive(Debug, Clone, PartialEq)]
505    struct DataWithAlignment {
506        a: u16,
507        b: u32,
508    }
509
510    impl Encode for DataWithAlignment {
511        const SIZE: DataSize = DataSize::Dynamic;
512        const ALIGNMENT: PageOffset = DEFAULT_ALIGNMENT;
513
514        fn encode(&'_ self) -> Cow<'_, [u8]> {
515            let mut buf = vec![0u8; self.size() as usize];
516            buf[0..2].copy_from_slice(&self.a.to_le_bytes());
517            buf[2..6].copy_from_slice(&self.b.to_le_bytes());
518            Cow::Owned(buf)
519        }
520
521        fn decode(data: Cow<[u8]>) -> MemoryResult<Self>
522        where
523            Self: Sized,
524        {
525            let a = u16::from_le_bytes([data[0], data[1]]);
526            let b = u32::from_le_bytes([data[2], data[3], data[4], data[5]]);
527            Ok(DataWithAlignment { a, b })
528        }
529
530        fn size(&self) -> MSize {
531            6
532        }
533    }
534}