// wasm_dbms_memory/memory_manager.rs

1// Rust guideline compliant 2026-03-01
2// X-WHERE-CLAUSE, M-CANONICAL-DOCS
3
4//! Memory manager for page-level memory operations.
5
6use wasm_dbms_api::prelude::{
7    DataSize, Encode, MSize, MemoryError, MemoryResult, Page, PageOffset,
8};
9
10use crate::memory_access::MemoryAccess;
11use crate::provider::MemoryProvider;
12
/// Schema page (reserved page 0).
pub const SCHEMA_PAGE: Page = 0;
/// ACL page (reserved page 1).
pub const ACL_PAGE: Page = 1;

/// The memory manager handles page-level memory operations on top of a
/// [`MemoryProvider`].
///
/// Pages `0` ([`SCHEMA_PAGE`]) and `1` ([`ACL_PAGE`]) are reserved; they are
/// allocated during [`MemoryManager::init`].
pub struct MemoryManager<P>
where
    P: MemoryProvider,
{
    // Backing memory provider that owns the raw pages.
    provider: P,
}
26
27impl<P> MemoryManager<P>
28where
29    P: MemoryProvider,
30{
31    /// Initializes the memory manager and allocates the header and reserved
32    /// pages.
33    ///
34    /// # Panics
35    ///
36    /// Panics if the memory provider fails to initialize.
37    pub fn init(provider: P) -> Self {
38        let mut manager = MemoryManager { provider };
39
40        // Check whether two pages are already allocated.
41        if manager.provider.pages() >= 2 {
42            return manager;
43        }
44
45        // Request at least 2 pages for header and ACL.
46        if let Err(err) = manager.provider.grow(2) {
47            panic!("Failed to grow memory during initialization: {err}");
48        }
49
50        manager
51    }
52
53    /// Returns the ACL page number.
54    pub const fn acl_page(&self) -> Page {
55        ACL_PAGE
56    }
57
58    /// Returns the schema page.
59    pub const fn schema_page(&self) -> Page {
60        SCHEMA_PAGE
61    }
62
63    /// Gets the last allocated page number.
64    fn last_page(&self) -> Option<Page> {
65        match self.provider.pages() {
66            0 => None,
67            n => Some(n as Page - 1),
68        }
69    }
70
71    /// Calculates the absolute offset in memory given a page number and an
72    /// offset within that page.
73    fn absolute_offset(&self, page: Page, offset: PageOffset) -> u64 {
74        (page as u64)
75            .checked_mul(P::PAGE_SIZE)
76            .and_then(|page_offset| page_offset.checked_add(offset as u64))
77            .expect("Overflow when calculating absolute offset")
78    }
79
80    /// Checks if the specified page is allocated.
81    fn check_unallocated_page(
82        &self,
83        page: Page,
84        offset: PageOffset,
85        data_size: MSize,
86    ) -> MemoryResult<()> {
87        if self.last_page().is_none_or(|last_page| page > last_page) {
88            return Err(MemoryError::SegmentationFault {
89                page,
90                offset,
91                data_size: data_size as u64,
92                page_size: P::PAGE_SIZE,
93            });
94        }
95        Ok(())
96    }
97
98    /// Checks if the given offset is aligned according to the alignment
99    /// requirement of type `E`.
100    fn check_alignment<E>(&self, offset: PageOffset) -> MemoryResult<()>
101    where
102        E: Encode,
103    {
104        let alignment = E::ALIGNMENT as PageOffset;
105        if alignment != 0 && !offset.is_multiple_of(alignment) {
106            return Err(MemoryError::OffsetNotAligned { offset, alignment });
107        }
108        Ok(())
109    }
110}
111
112impl<P> MemoryAccess for MemoryManager<P>
113where
114    P: MemoryProvider,
115{
116    fn page_size(&self) -> u64 {
117        P::PAGE_SIZE
118    }
119
120    fn allocate_page(&mut self) -> MemoryResult<Page> {
121        self.provider.grow(1)?;
122
123        // Zero the newly allocated page.
124        self.provider.write(
125            self.absolute_offset(self.last_page().unwrap_or(0), 0),
126            &vec![0u8; P::PAGE_SIZE as usize],
127        )?;
128
129        match self.last_page() {
130            Some(page) => Ok(page),
131            None => Err(MemoryError::FailedToAllocatePage),
132        }
133    }
134
135    fn read_at<D>(&mut self, page: Page, offset: PageOffset) -> MemoryResult<D>
136    where
137        D: Encode,
138    {
139        self.check_alignment::<D>(offset)?;
140
141        let mut buf = vec![
142            0u8;
143            match D::SIZE {
144                DataSize::Fixed(size) => size as usize,
145                DataSize::Dynamic => (P::PAGE_SIZE as usize).saturating_sub(offset as usize),
146            }
147        ];
148
149        self.read_at_raw(page, offset, &mut buf)?;
150
151        D::decode(std::borrow::Cow::Owned(buf))
152    }
153
154    fn write_at<E>(&mut self, page: Page, offset: PageOffset, data: &E) -> MemoryResult<()>
155    where
156        E: Encode,
157    {
158        self.check_unallocated_page(page, offset, data.size())?;
159        self.check_alignment::<E>(offset)?;
160
161        let encoded = data.encode();
162
163        if offset as u64 + encoded.len() as u64 > P::PAGE_SIZE {
164            return Err(MemoryError::SegmentationFault {
165                page,
166                offset,
167                data_size: encoded.len() as u64,
168                page_size: P::PAGE_SIZE,
169            });
170        }
171
172        let absolute_offset = self.absolute_offset(page, offset);
173        self.provider.write(absolute_offset, encoded.as_ref())?;
174
175        // Zero padding bytes if any.
176        let padding = align_up::<E>(encoded.len()) - encoded.len();
177        if padding > 0 {
178            let padding_offset = absolute_offset + encoded.len() as u64;
179            let padding_buffer = vec![0u8; padding];
180            self.provider
181                .write(padding_offset, padding_buffer.as_ref())?;
182        }
183
184        Ok(())
185    }
186
187    fn write_at_raw(&mut self, page: Page, offset: PageOffset, buf: &[u8]) -> MemoryResult<()> {
188        self.check_unallocated_page(page, offset, buf.len() as MSize)?;
189
190        if offset as u64 + buf.len() as u64 > P::PAGE_SIZE {
191            return Err(MemoryError::SegmentationFault {
192                page,
193                offset,
194                data_size: buf.len() as u64,
195                page_size: P::PAGE_SIZE,
196            });
197        }
198
199        let absolute_offset = self.absolute_offset(page, offset);
200        self.provider.write(absolute_offset, buf)
201    }
202
203    fn zero<E>(&mut self, page: Page, offset: PageOffset, data: &E) -> MemoryResult<()>
204    where
205        E: Encode,
206    {
207        self.check_unallocated_page(page, offset, data.size())?;
208        self.check_alignment::<E>(offset)?;
209
210        let length = align_up::<E>(data.size() as usize);
211
212        if offset as u64 + (length as u64) > P::PAGE_SIZE {
213            return Err(MemoryError::SegmentationFault {
214                page,
215                offset,
216                data_size: data.size() as u64,
217                page_size: P::PAGE_SIZE,
218            });
219        }
220
221        let absolute_offset = self.absolute_offset(page, offset);
222        let buffer = vec![0u8; length];
223        self.provider.write(absolute_offset, buffer.as_ref())
224    }
225
226    fn read_at_raw(
227        &mut self,
228        page: Page,
229        offset: PageOffset,
230        buf: &mut [u8],
231    ) -> MemoryResult<usize> {
232        if self.last_page().is_none_or(|last_page| page > last_page) {
233            return Err(MemoryError::SegmentationFault {
234                page,
235                offset,
236                data_size: buf.len() as u64,
237                page_size: P::PAGE_SIZE,
238            });
239        }
240
241        let read_len = ((P::PAGE_SIZE - offset as u64) as usize).min(buf.len());
242
243        let absolute_offset = self.absolute_offset(page, offset);
244        self.provider
245            .read(absolute_offset, buf[..read_len].as_mut())?;
246
247        Ok(read_len)
248    }
249}
250
251/// Gets the padding at the given offset to the next multiple of
252/// [`E::ALIGNMENT`].
253#[inline]
254pub const fn align_up<E>(offset: usize) -> usize
255where
256    E: Encode,
257{
258    let alignment = E::ALIGNMENT as usize;
259    offset.div_ceil(alignment) * alignment
260}
261
#[cfg(test)]
mod tests {
    use std::borrow::Cow;

    use wasm_dbms_api::prelude::{
        DEFAULT_ALIGNMENT, DataSize, MSize, MemoryError, MemoryResult, PageOffset, Text,
    };

    use super::*;
    use crate::provider::HeapMemoryProvider;

    /// Builds a memory manager backed by an in-heap provider with the two
    /// reserved pages (schema and ACL) already allocated by `init`.
    fn make_mm() -> MemoryManager<HeapMemoryProvider> {
        MemoryManager::init(HeapMemoryProvider::default())
    }

    #[test]
    fn test_should_init_memory_manager() {
        let mm = make_mm();
        // init() must have allocated exactly the two reserved pages (0 and 1).
        assert_eq!(mm.last_page(), Some(1));
    }

    #[test]
    fn test_should_get_last_page() {
        let mm = make_mm();
        let last_page = mm.last_page();
        assert_eq!(last_page, Some(1)); // header and ACL pages
    }

    #[test]
    fn test_should_get_memory_page_size() {
        let mm = make_mm();
        let page_size = mm.page_size();
        assert_eq!(page_size, HeapMemoryProvider::PAGE_SIZE);
    }

    #[test]
    fn test_should_write_and_read_fixed_data_size() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 42, b: 1337 };
        mm.write_at(ACL_PAGE, 0, &data_to_write)
            .expect("Failed to write data to ACL page");

        // Round-trip: reading back must decode to an equal value.
        let out: FixedSizeData = mm
            .read_at(ACL_PAGE, 0)
            .expect("Failed to read data from ACL page");

        assert_eq!(out, data_to_write);
    }

    #[test]
    fn test_write_should_zero_padding() {
        let mut mm = make_mm();
        let data_to_write = Text("very_long_string".to_string());
        mm.write_at(ACL_PAGE, 0, &data_to_write)
            .expect("Failed to write data to ACL page");

        let mut buffer = vec![0; 32];
        mm.read_at_raw(ACL_PAGE, 0, &mut buffer)
            .expect("Failed to read data from ACL page");

        // One encoded byte is zero (length/terminator), so the non-zero count
        // is the encoded size minus one — assumes Text's encoding; see Encode
        // impl for Text.
        let non_zero_count = buffer.iter().filter(|&&b| b != 0).count();
        assert_eq!(non_zero_count, data_to_write.size() as usize - 1);

        // Overwrite with a shorter value: the tail of the previous longer
        // value must be zeroed out by the padding logic in write_at.
        let data_to_write_short = Text("short".to_string());
        mm.write_at(ACL_PAGE, 0, &data_to_write_short)
            .expect("Failed to write data to ACL page");

        let mut buffer = vec![0; 32];
        mm.read_at_raw(ACL_PAGE, 0, &mut buffer)
            .expect("Failed to read data from ACL page");

        let non_zero_count = buffer.iter().filter(|&&b| b != 0).count();
        assert_eq!(non_zero_count, data_to_write_short.size() as usize - 1);
    }

    #[test]
    fn test_should_zero_data() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        mm.write_at(ACL_PAGE, 48, &data_to_write)
            .expect("Failed to write data to ACL page");

        mm.zero(ACL_PAGE, 48, &data_to_write)
            .expect("Failed to zero data on ACL page");

        // Everything from offset 48 onward must read back as zeros.
        let mut buffer = vec![0; 50];
        mm.read_at_raw(ACL_PAGE, 48, &mut buffer)
            .expect("Failed to read data from ACL page");

        assert!(buffer.iter().all(|&b| b == 0));
    }

    #[test]
    fn test_should_zero_with_alignment() {
        let mut mm = make_mm();
        // Fill the first 12 bytes with two 6-byte values.
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        mm.write_at(ACL_PAGE, 0, &data_to_write)
            .expect("Failed to write data to ACL page");
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        mm.write_at(ACL_PAGE, 6, &data_to_write)
            .expect("Failed to write data to ACL page");

        // Zeroing an aligned type must clear its whole aligned footprint
        // (align_up of its size), covering both values written above.
        let data_with_alignment = DataWithAlignment { a: 100, b: 200 };
        mm.zero(ACL_PAGE, 0, &data_with_alignment)
            .expect("Failed to zero data on ACL page");

        let mut buffer = vec![0; 32];
        mm.read_at_raw(ACL_PAGE, 0, &mut buffer)
            .expect("Failed to read data from ACL page");
        assert!(
            buffer.iter().all(|&b| b == 0),
            "First 32 bytes are not zeroed"
        );
    }

    #[test]
    fn test_should_check_whether_write_is_aligned() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 100, b: 200 };
        // Offset 2 is not a multiple of FixedSizeData::ALIGNMENT (6).
        let res = mm.write_at(ACL_PAGE, 2, &data_to_write);
        assert!(matches!(res, Err(MemoryError::OffsetNotAligned { .. })));
    }

    #[test]
    fn test_should_check_whether_read_is_aligned() {
        let mut mm = make_mm();
        // Offset 3 is not a multiple of FixedSizeData::ALIGNMENT (6).
        let result: MemoryResult<FixedSizeData> = mm.read_at(ACL_PAGE, 3);
        assert!(matches!(result, Err(MemoryError::OffsetNotAligned { .. })));
    }

    #[test]
    fn test_should_check_whether_zero_is_aligned() {
        let mut mm = make_mm();
        let data_to_zero = FixedSizeData { a: 1, b: 2 };
        // Offset 5 is not a multiple of FixedSizeData::ALIGNMENT (6).
        let result = mm.zero(ACL_PAGE, 5, &data_to_zero);
        assert!(matches!(result, Err(MemoryError::OffsetNotAligned { .. })));
    }

    #[test]
    fn test_should_not_zero_unallocated_page() {
        let mut mm = make_mm();
        let data_to_zero = FixedSizeData { a: 1, b: 2 };
        // Page 10 was never allocated (only pages 0 and 1 exist).
        let result = mm.zero(10, 0, &data_to_zero);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
    }

    #[test]
    fn test_should_not_zero_out_of_bounds() {
        let mut mm = make_mm();
        let data_to_zero = FixedSizeData { a: 1, b: 2 };
        // Only 4 bytes remain in the page, fewer than the 6-byte footprint.
        let result = mm.zero(
            ACL_PAGE,
            (HeapMemoryProvider::PAGE_SIZE - 4) as PageOffset,
            &data_to_zero,
        );
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
    }

    #[test]
    fn test_should_read_raw() {
        let mut mm = make_mm();
        let data_to_write = vec![1u8, 2, 3, 4, 5];
        mm.write_at_raw(ACL_PAGE, 20, &data_to_write)
            .expect("Failed to write raw data to ACL page");

        let mut buf = vec![0u8; 5];
        mm.read_at_raw(ACL_PAGE, 20, &mut buf)
            .expect("Failed to read raw data from ACL page");

        assert_eq!(buf, data_to_write);
    }

    #[test]
    fn test_should_fail_out_of_bounds_access() {
        let mut mm = make_mm();
        let data_to_write = FixedSizeData { a: 1, b: 2 };
        // Write crossing the end of the page.
        let result = mm.write_at(
            ACL_PAGE,
            (HeapMemoryProvider::PAGE_SIZE - 4) as PageOffset,
            &data_to_write,
        );
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));

        // Reads and writes to an unallocated page must also fault.
        let result: MemoryResult<FixedSizeData> = mm.read_at(10, 0);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
        let result = mm.write_at(10, 0, &data_to_write);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));
    }

    #[test]
    fn test_should_allocate_new_page() {
        let mut mm = make_mm();
        let initial_last_page = mm.last_page().unwrap();
        let new_page = mm.allocate_page().expect("Failed to allocate new page");
        // Pages are allocated sequentially at the end of memory.
        assert_eq!(new_page, initial_last_page + 1);
        let updated_last_page = mm.last_page().unwrap();
        assert_eq!(updated_last_page, new_page);
    }

    #[test]
    fn test_should_check_unallocated_page() {
        let mm = make_mm();
        let result = mm.check_unallocated_page(100, 0, 10);
        assert!(matches!(result, Err(MemoryError::SegmentationFault { .. })));

        // The last allocated page itself is a valid target.
        let last_page = mm.last_page().unwrap();
        let result = mm.check_unallocated_page(last_page, 0, 10);
        assert!(result.is_ok());
    }

    #[test]
    fn test_should_compute_padding() {
        // DataWithAlignment uses DEFAULT_ALIGNMENT, assumed 32 here.
        assert_eq!(align_up::<DataWithAlignment>(0), 0);
        assert_eq!(align_up::<DataWithAlignment>(1), 32);
        assert_eq!(align_up::<DataWithAlignment>(2), 32);
        assert_eq!(align_up::<DataWithAlignment>(3), 32);
        assert_eq!(align_up::<DataWithAlignment>(31), 32);
        assert_eq!(align_up::<DataWithAlignment>(32), 32);
        assert_eq!(align_up::<DataWithAlignment>(48), 64);
        assert_eq!(align_up::<DataWithAlignment>(147), 160);
    }

    /// Fixture with a fixed 6-byte encoding (u16 + u32, little-endian) and an
    /// alignment equal to its own size.
    #[derive(Debug, Clone, PartialEq)]
    struct FixedSizeData {
        a: u16,
        b: u32,
    }

    impl Encode for FixedSizeData {
        const SIZE: DataSize = DataSize::Fixed(6);
        // Deliberately unusual alignment (6) to exercise alignment checks.
        const ALIGNMENT: PageOffset = 6;

        fn encode(&'_ self) -> Cow<'_, [u8]> {
            let mut buf = vec![0u8; self.size() as usize];
            buf[0..2].copy_from_slice(&self.a.to_le_bytes());
            buf[2..6].copy_from_slice(&self.b.to_le_bytes());
            Cow::Owned(buf)
        }

        fn decode(data: Cow<[u8]>) -> MemoryResult<Self>
        where
            Self: Sized,
        {
            let a = u16::from_le_bytes([data[0], data[1]]);
            let b = u32::from_le_bytes([data[2], data[3], data[4], data[5]]);
            Ok(FixedSizeData { a, b })
        }

        fn size(&self) -> MSize {
            6
        }
    }

    /// Fixture with a dynamic size and the crate's default alignment, used to
    /// exercise padding / aligned-zeroing behavior.
    #[derive(Debug, Clone, PartialEq)]
    struct DataWithAlignment {
        a: u16,
        b: u32,
    }

    impl Encode for DataWithAlignment {
        const SIZE: DataSize = DataSize::Dynamic;
        const ALIGNMENT: PageOffset = DEFAULT_ALIGNMENT;

        fn encode(&'_ self) -> Cow<'_, [u8]> {
            let mut buf = vec![0u8; self.size() as usize];
            buf[0..2].copy_from_slice(&self.a.to_le_bytes());
            buf[2..6].copy_from_slice(&self.b.to_le_bytes());
            Cow::Owned(buf)
        }

        fn decode(data: Cow<[u8]>) -> MemoryResult<Self>
        where
            Self: Sized,
        {
            let a = u16::from_le_bytes([data[0], data[1]]);
            let b = u32::from_le_bytes([data[2], data[3], data[4], data[5]]);
            Ok(DataWithAlignment { a, b })
        }

        fn size(&self) -> MSize {
            6
        }
    }
}