1use crate::{WasmResult, WasmTrap, PAGE_SIZE};
16
/// Fixed-capacity, statically sized linear memory for a Wasm instance.
///
/// Backing storage is an inline 2-D byte array (`MAX_PAGES` pages of
/// `PAGE_SIZE` bytes each) — no heap allocation is performed, so the whole
/// memory lives wherever the struct is placed.
pub struct IsolatedMemory<const MAX_PAGES: usize> {
    // Backing page storage. Only the first `active_pages` pages are part of
    // the live wasm memory; the rest are reserve capacity for `grow`.
    pages: [[u8; PAGE_SIZE]; MAX_PAGES],
    // Number of currently active pages. Invariant: active_pages <= MAX_PAGES.
    active_pages: usize,
}
30
31impl<const MAX_PAGES: usize> IsolatedMemory<MAX_PAGES> {
32 #[inline(never)]
37 pub fn try_new(initial_pages: usize) -> Result<Self, crate::ConstructionError> {
38 if initial_pages > MAX_PAGES {
39 return Err(crate::ConstructionError::MemoryInitialPagesExceedsMax {
40 initial: initial_pages,
41 max: MAX_PAGES,
42 });
43 }
44 Ok(Self {
45 pages: [[0u8; PAGE_SIZE]; MAX_PAGES],
46 active_pages: initial_pages,
47 })
48 }
49
50 #[inline(never)]
59 pub fn try_init(
60 slot: &mut core::mem::MaybeUninit<Self>,
61 initial_pages: usize,
62 ) -> Result<(), crate::ConstructionError> {
63 if initial_pages > MAX_PAGES {
64 return Err(crate::ConstructionError::MemoryInitialPagesExceedsMax {
65 initial: initial_pages,
66 max: MAX_PAGES,
67 });
68 }
69 let ptr = slot.as_mut_ptr();
70 unsafe {
74 core::ptr::addr_of_mut!((*ptr).pages).write_bytes(0, 1);
77 core::ptr::addr_of_mut!((*ptr).active_pages).write(initial_pages);
78 }
79 Ok(())
80 }
81
82 #[inline(always)]
84 pub fn page_count(&self) -> usize {
85 self.active_pages
86 }
87
88 #[inline(always)]
90 pub fn active_size(&self) -> usize {
91 self.active_pages * PAGE_SIZE
92 }
93
94 pub fn grow(&mut self, delta: u32) -> i32 {
97 let old = self.active_pages;
98 let new = old.wrapping_add(delta as usize);
99 if new > MAX_PAGES {
100 return -1;
101 }
102 for page in &mut self.pages[old..new] {
104 page.fill(0);
105 }
106 self.active_pages = new;
107 old as i32
108 }
109
110 #[inline(always)]
112 pub fn size(&self) -> i32 {
113 self.active_pages as i32
114 }
115
116 #[inline(always)]
118 fn flat(&self) -> &[u8] {
119 self.pages.as_flattened()
120 }
121
122 #[inline(always)]
124 fn flat_mut(&mut self) -> &mut [u8] {
125 self.pages.as_flattened_mut()
126 }
127
128 pub fn memory_copy(&mut self, dst: u32, src: u32, len: u32) -> WasmResult<()> {
136 let active = self.active_size();
137 let dst = dst as usize;
138 let src = src as usize;
139 let len = len as usize;
140 if src.checked_add(len).is_none_or(|end| end > active)
141 || dst.checked_add(len).is_none_or(|end| end > active)
142 {
143 return Err(WasmTrap::OutOfBounds);
144 }
145 self.flat_mut().copy_within(src..src + len, dst);
146 Ok(())
147 }
148
149 pub fn fill(&mut self, dst: usize, val: u8, len: usize) -> WasmResult<()> {
154 let active = self.active_size();
155 fill_inner(self.flat_mut(), active, dst, val, len)
156 }
157
158 pub fn init_data_partial(
166 &mut self,
167 dst: usize,
168 data: &[u8],
169 src_offset: usize,
170 len: usize,
171 ) -> WasmResult<()> {
172 let active = self.active_size();
173 init_data_partial_inner(self.flat_mut(), active, dst, data, src_offset, len)
174 }
175
176 #[inline(always)]
180 pub fn load_i32(&self, offset: usize) -> WasmResult<i32> {
181 load_i32_inner(self.flat(), self.active_size(), offset)
182 }
183
184 #[inline(always)]
186 pub fn load_i64(&self, offset: usize) -> WasmResult<i64> {
187 load_i64_inner(self.flat(), self.active_size(), offset)
188 }
189
190 #[inline(always)]
192 pub fn load_u8(&self, offset: usize) -> WasmResult<u8> {
193 load_u8_inner(self.flat(), self.active_size(), offset)
194 }
195
196 #[inline(always)]
198 pub fn load_u16(&self, offset: usize) -> WasmResult<u16> {
199 load_u16_inner(self.flat(), self.active_size(), offset)
200 }
201
202 #[inline(always)]
204 pub fn load_f32(&self, offset: usize) -> WasmResult<f32> {
205 load_f32_inner(self.flat(), self.active_size(), offset)
206 }
207
208 #[inline(always)]
210 pub fn load_f64(&self, offset: usize) -> WasmResult<f64> {
211 load_f64_inner(self.flat(), self.active_size(), offset)
212 }
213
214 #[inline(always)]
216 pub fn store_i32(&mut self, offset: usize, value: i32) -> WasmResult<()> {
217 let active = self.active_size();
218 store_i32_inner(self.flat_mut(), active, offset, value)
219 }
220
221 #[inline(always)]
223 pub fn store_i64(&mut self, offset: usize, value: i64) -> WasmResult<()> {
224 let active = self.active_size();
225 store_i64_inner(self.flat_mut(), active, offset, value)
226 }
227
228 #[inline(always)]
230 pub fn store_u8(&mut self, offset: usize, value: u8) -> WasmResult<()> {
231 let active = self.active_size();
232 store_u8_inner(self.flat_mut(), active, offset, value)
233 }
234
235 #[inline(always)]
237 pub fn store_u16(&mut self, offset: usize, value: u16) -> WasmResult<()> {
238 let active = self.active_size();
239 store_u16_inner(self.flat_mut(), active, offset, value)
240 }
241
242 #[inline(always)]
244 pub fn store_f32(&mut self, offset: usize, value: f32) -> WasmResult<()> {
245 let active = self.active_size();
246 store_f32_inner(self.flat_mut(), active, offset, value)
247 }
248
249 #[inline(always)]
251 pub fn store_f64(&mut self, offset: usize, value: f64) -> WasmResult<()> {
252 let active = self.active_size();
253 store_f64_inner(self.flat_mut(), active, offset, value)
254 }
255
256 #[inline(always)]
266 pub fn init_data(&mut self, offset: usize, data: &[u8]) -> WasmResult<()> {
267 let active = self.active_size();
268 init_data_inner(self.flat_mut(), active, offset, data)
269 }
270
271 #[inline(always)]
281 pub unsafe fn load_i32_unchecked(&self, offset: usize) -> i32 {
282 load_i32_unchecked_inner(self.flat(), offset)
283 }
284
285 #[inline(always)]
290 pub unsafe fn load_i64_unchecked(&self, offset: usize) -> i64 {
291 load_i64_unchecked_inner(self.flat(), offset)
292 }
293
294 #[inline(always)]
299 pub unsafe fn store_i32_unchecked(&mut self, offset: usize, value: i32) {
300 store_i32_unchecked_inner(self.flat_mut(), offset, value)
301 }
302
303 #[inline(always)]
308 pub unsafe fn store_i64_unchecked(&mut self, offset: usize, value: i64) {
309 store_i64_unchecked_inner(self.flat_mut(), offset, value)
310 }
311
312 #[inline(always)]
314 pub fn as_slice(&self) -> &[u8] {
315 &self.flat()[..self.active_size()]
316 }
317
318 #[inline(always)]
320 pub fn as_mut_slice(&mut self) -> &mut [u8] {
321 let size = self.active_size();
322 &mut self.flat_mut()[..size]
323 }
324}
325
326#[inline(always)]
331fn checked_slice(
332 memory: &[u8],
333 active_bytes: usize,
334 offset: usize,
335 len: usize,
336) -> WasmResult<&[u8]> {
337 let end = offset.checked_add(len).ok_or(WasmTrap::OutOfBounds)?;
338 if end > active_bytes {
339 return Err(WasmTrap::OutOfBounds);
340 }
341 memory.get(offset..end).ok_or(WasmTrap::OutOfBounds)
345}
346
347#[inline(always)]
349fn checked_slice_mut(
350 memory: &mut [u8],
351 active_bytes: usize,
352 offset: usize,
353 len: usize,
354) -> WasmResult<&mut [u8]> {
355 let end = offset.checked_add(len).ok_or(WasmTrap::OutOfBounds)?;
356 if end > active_bytes {
357 return Err(WasmTrap::OutOfBounds);
358 }
359 memory.get_mut(offset..end).ok_or(WasmTrap::OutOfBounds)
360}
361
362#[inline(always)]
365fn to_array<const N: usize>(slice: &[u8]) -> WasmResult<[u8; N]> {
366 slice.try_into().map_err(|_| WasmTrap::OutOfBounds)
367}
368
369#[inline(never)]
378fn load_i32_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<i32> {
379 let s = checked_slice(memory, active_bytes, offset, 4)?;
380 Ok(i32::from_le_bytes(to_array(s)?))
381}
382
383#[inline(never)]
384fn load_i64_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<i64> {
385 let s = checked_slice(memory, active_bytes, offset, 8)?;
386 Ok(i64::from_le_bytes(to_array(s)?))
387}
388
389#[inline(never)]
390fn load_u8_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<u8> {
391 let s = checked_slice(memory, active_bytes, offset, 1)?;
392 Ok(s[0])
393}
394
395#[inline(never)]
396fn load_u16_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<u16> {
397 let s = checked_slice(memory, active_bytes, offset, 2)?;
398 Ok(u16::from_le_bytes(to_array(s)?))
399}
400
401#[inline(never)]
402fn load_f32_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<f32> {
403 let s = checked_slice(memory, active_bytes, offset, 4)?;
404 Ok(f32::from_le_bytes(to_array(s)?))
405}
406
407#[inline(never)]
408fn load_f64_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<f64> {
409 let s = checked_slice(memory, active_bytes, offset, 8)?;
410 Ok(f64::from_le_bytes(to_array(s)?))
411}
412
413#[inline(never)]
414fn store_i32_inner(
415 memory: &mut [u8],
416 active_bytes: usize,
417 offset: usize,
418 value: i32,
419) -> WasmResult<()> {
420 let s = checked_slice_mut(memory, active_bytes, offset, 4)?;
421 s.copy_from_slice(&value.to_le_bytes());
422 Ok(())
423}
424
425#[inline(never)]
426fn store_i64_inner(
427 memory: &mut [u8],
428 active_bytes: usize,
429 offset: usize,
430 value: i64,
431) -> WasmResult<()> {
432 let s = checked_slice_mut(memory, active_bytes, offset, 8)?;
433 s.copy_from_slice(&value.to_le_bytes());
434 Ok(())
435}
436
437#[inline(never)]
438fn store_u8_inner(
439 memory: &mut [u8],
440 active_bytes: usize,
441 offset: usize,
442 value: u8,
443) -> WasmResult<()> {
444 let s = checked_slice_mut(memory, active_bytes, offset, 1)?;
445 s[0] = value;
446 Ok(())
447}
448
449#[inline(never)]
450fn store_u16_inner(
451 memory: &mut [u8],
452 active_bytes: usize,
453 offset: usize,
454 value: u16,
455) -> WasmResult<()> {
456 let s = checked_slice_mut(memory, active_bytes, offset, 2)?;
457 s.copy_from_slice(&value.to_le_bytes());
458 Ok(())
459}
460
461#[inline(never)]
462fn store_f32_inner(
463 memory: &mut [u8],
464 active_bytes: usize,
465 offset: usize,
466 value: f32,
467) -> WasmResult<()> {
468 let s = checked_slice_mut(memory, active_bytes, offset, 4)?;
469 s.copy_from_slice(&value.to_le_bytes());
470 Ok(())
471}
472
473#[inline(never)]
474fn store_f64_inner(
475 memory: &mut [u8],
476 active_bytes: usize,
477 offset: usize,
478 value: f64,
479) -> WasmResult<()> {
480 let s = checked_slice_mut(memory, active_bytes, offset, 8)?;
481 s.copy_from_slice(&value.to_le_bytes());
482 Ok(())
483}
484
485#[inline(never)]
486fn init_data_inner(
487 memory: &mut [u8],
488 active_bytes: usize,
489 offset: usize,
490 data: &[u8],
491) -> WasmResult<()> {
492 let dst = checked_slice_mut(memory, active_bytes, offset, data.len())?;
493 dst.copy_from_slice(data);
494 Ok(())
495}
496
497#[inline(never)]
498fn fill_inner(
499 memory: &mut [u8],
500 active_bytes: usize,
501 dst: usize,
502 val: u8,
503 len: usize,
504) -> WasmResult<()> {
505 let region = checked_slice_mut(memory, active_bytes, dst, len)?;
506 region.fill(val);
507 Ok(())
508}
509
510#[inline(never)]
511fn init_data_partial_inner(
512 memory: &mut [u8],
513 active_bytes: usize,
514 dst: usize,
515 data: &[u8],
516 src_offset: usize,
517 len: usize,
518) -> WasmResult<()> {
519 let src_end = src_offset.checked_add(len).ok_or(WasmTrap::OutOfBounds)?;
520 if src_end > data.len() {
521 return Err(WasmTrap::OutOfBounds);
522 }
523 let src = &data[src_offset..src_end];
524 let dst_region = checked_slice_mut(memory, active_bytes, dst, len)?;
525 dst_region.copy_from_slice(src);
526 Ok(())
527}
528
/// Unchecked little-endian i32 load.
///
/// # Safety
/// Caller must guarantee `offset + 4 <= memory.len()`; no bounds check is
/// performed. Unaligned access is handled via `read_unaligned`.
#[inline(never)]
unsafe fn load_i32_unchecked_inner(memory: &[u8], offset: usize) -> i32 {
    let raw = memory.as_ptr().add(offset).cast::<i32>().read_unaligned();
    i32::from_le(raw)
}
541
/// Unchecked little-endian i64 load.
///
/// # Safety
/// Caller must guarantee `offset + 8 <= memory.len()`; no bounds check is
/// performed. Unaligned access is handled via `read_unaligned`.
#[inline(never)]
unsafe fn load_i64_unchecked_inner(memory: &[u8], offset: usize) -> i64 {
    let raw = memory.as_ptr().add(offset).cast::<i64>().read_unaligned();
    i64::from_le(raw)
}
547
/// Unchecked little-endian i32 store.
///
/// # Safety
/// Caller must guarantee `offset + 4 <= memory.len()`; no bounds check is
/// performed. Unaligned access is handled via `write_unaligned`.
#[inline(never)]
unsafe fn store_i32_unchecked_inner(memory: &mut [u8], offset: usize, value: i32) {
    memory
        .as_mut_ptr()
        .add(offset)
        .cast::<i32>()
        .write_unaligned(value.to_le());
}
553
/// Unchecked little-endian i64 store.
///
/// # Safety
/// Caller must guarantee `offset + 8 <= memory.len()`; no bounds check is
/// performed. Unaligned access is handled via `write_unaligned`.
#[inline(never)]
unsafe fn store_i64_unchecked_inner(memory: &mut [u8], offset: usize, value: i64) {
    memory
        .as_mut_ptr()
        .add(offset)
        .cast::<i64>()
        .write_unaligned(value.to_le());
}
559
#[cfg(test)]
mod tests {
    use super::*;

    // Single-page memory used by most tests.
    type Mem = IsolatedMemory<1>;

    // --- construction ---

    #[test]
    fn new_initializes_to_zero() {
        let mem = Mem::try_new(1).unwrap();
        assert_eq!(mem.page_count(), 1);
        assert_eq!(mem.active_size(), PAGE_SIZE);
        assert!(mem.as_slice().iter().all(|&b| b == 0));
    }

    #[test]
    fn try_new_fails_if_initial_exceeds_max() {
        let result = Mem::try_new(2);
        assert!(result.is_err());
        // The error carries both the requested and the maximum page counts.
        assert!(matches!(
            result,
            Err(crate::ConstructionError::MemoryInitialPagesExceedsMax { initial: 2, max: 1 })
        ));
    }

    // --- grow ---

    #[test]
    fn grow_success() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        // grow returns the OLD page count on success.
        assert_eq!(mem.grow(2), 1);
        assert_eq!(mem.page_count(), 3);
    }

    #[test]
    fn grow_to_max() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        assert_eq!(mem.grow(3), 1);
        assert_eq!(mem.page_count(), 4);
    }

    #[test]
    fn grow_beyond_max_fails() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        // Failed grow returns -1 and leaves the page count untouched.
        assert_eq!(mem.grow(4), -1);
        assert_eq!(mem.page_count(), 1);
    }

    #[test]
    fn grow_zero_is_noop() {
        let mut mem = Mem::try_new(1).unwrap();
        assert_eq!(mem.grow(0), 1);
        assert_eq!(mem.page_count(), 1);
    }

    #[test]
    fn grow_zeroes_new_pages() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        assert_eq!(mem.grow(1), 1);
        // Inspect the raw backing storage for the newly activated page.
        let flat = mem.flat();
        let new_start = PAGE_SIZE;
        let new_end = 2 * PAGE_SIZE;
        assert!(flat[new_start..new_end].iter().all(|&b| b == 0));
    }

    #[test]
    fn size_returns_page_count() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        assert_eq!(mem.size(), 2);
    }

    // --- i32 load/store ---

    #[test]
    fn store_load_i32_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_i32(100, 0x12345678).unwrap();
        assert_eq!(mem.load_i32(100), Ok(0x12345678));
    }

    #[test]
    fn load_i32_out_of_bounds() {
        let mem = Mem::try_new(1).unwrap();
        // Last fully in-bounds 4-byte offset succeeds; one past it traps.
        assert!(mem.load_i32(PAGE_SIZE - 4).is_ok());
        assert_eq!(mem.load_i32(PAGE_SIZE - 3), Err(WasmTrap::OutOfBounds));
        assert_eq!(mem.load_i32(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
    }

    #[test]
    fn store_i32_out_of_bounds() {
        let mut mem = Mem::try_new(1).unwrap();
        assert!(mem.store_i32(PAGE_SIZE - 4, 42).is_ok());
        assert_eq!(mem.store_i32(PAGE_SIZE - 3, 42), Err(WasmTrap::OutOfBounds));
    }

    #[test]
    fn load_i32_offset_overflow() {
        // offset + 4 overflows usize; must trap rather than wrap.
        let mem = Mem::try_new(1).unwrap();
        assert_eq!(mem.load_i32(usize::MAX), Err(WasmTrap::OutOfBounds));
    }

    // --- i64 load/store ---

    #[test]
    fn store_load_i64_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_i64(200, 0x0102030405060708i64).unwrap();
        assert_eq!(mem.load_i64(200), Ok(0x0102030405060708i64));
    }

    #[test]
    fn load_i64_out_of_bounds() {
        let mem = Mem::try_new(1).unwrap();
        assert!(mem.load_i64(PAGE_SIZE - 8).is_ok());
        assert_eq!(mem.load_i64(PAGE_SIZE - 7), Err(WasmTrap::OutOfBounds));
    }

    // --- u8 load/store ---

    #[test]
    fn store_load_u8_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_u8(0, 0xFF).unwrap();
        assert_eq!(mem.load_u8(0), Ok(0xFF));
    }

    #[test]
    fn load_u8_out_of_bounds() {
        let mem = Mem::try_new(1).unwrap();
        assert!(mem.load_u8(PAGE_SIZE - 1).is_ok());
        assert_eq!(mem.load_u8(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
    }

    // --- u16 load/store ---

    #[test]
    fn store_load_u16_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_u16(50, 0xBEEF).unwrap();
        assert_eq!(mem.load_u16(50), Ok(0xBEEF));
    }

    // --- float load/store ---

    #[test]
    fn store_load_f32_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_f32(300, core::f32::consts::PI).unwrap();
        assert_eq!(mem.load_f32(300), Ok(core::f32::consts::PI));
    }

    #[test]
    fn store_load_f64_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_f64(400, core::f64::consts::E).unwrap();
        assert_eq!(mem.load_f64(400), Ok(core::f64::consts::E));
    }

    // --- unchecked accessors ---

    #[test]
    fn unchecked_i32_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        // SAFETY (test): offsets are well within the single active page.
        unsafe {
            mem.store_i32_unchecked(100, 42);
            assert_eq!(mem.load_i32_unchecked(100), 42);
        }
    }

    #[test]
    fn unchecked_i64_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        // SAFETY (test): offsets are well within the single active page.
        unsafe {
            mem.store_i64_unchecked(200, -1i64);
            assert_eq!(mem.load_i64_unchecked(200), -1i64);
        }
    }

    // --- active-region enforcement ---

    #[test]
    fn access_beyond_active_pages_traps() {
        // Capacity is 2 pages but only 1 is active: the reserve page must
        // not be addressable.
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        assert!(mem.load_i32(0).is_ok());
        assert_eq!(mem.load_i32(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
    }

    #[test]
    fn grow_then_access_new_region() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        assert_eq!(mem.load_i32(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
        mem.grow(1);
        assert!(mem.load_i32(PAGE_SIZE).is_ok());
        mem.store_i32(PAGE_SIZE, 99).unwrap();
        assert_eq!(mem.load_i32(PAGE_SIZE), Ok(99));
    }

    // --- init_data ---

    #[test]
    fn init_data_writes_bytes() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.init_data(10, &[1u8, 2, 3, 4]).unwrap();
        assert_eq!(mem.load_u8(10).unwrap(), 1);
        assert_eq!(mem.load_u8(11).unwrap(), 2);
        assert_eq!(mem.load_u8(12).unwrap(), 3);
        assert_eq!(mem.load_u8(13).unwrap(), 4);
    }

    #[test]
    fn init_data_empty_slice_is_noop() {
        let mut mem = Mem::try_new(1).unwrap();
        assert!(mem.init_data(0, &[]).is_ok());
    }

    #[test]
    fn init_data_out_of_bounds() {
        let mut mem = Mem::try_new(1).unwrap();
        let data = [0u8; 10];
        assert_eq!(
            mem.init_data(PAGE_SIZE - 5, &data),
            Err(WasmTrap::OutOfBounds)
        );
    }

    #[test]
    fn init_data_at_boundary() {
        let mut mem = Mem::try_new(1).unwrap();
        let data = [42u8; 4];
        assert!(mem.init_data(PAGE_SIZE - 4, &data).is_ok());
        assert_eq!(mem.load_u8(PAGE_SIZE - 1).unwrap(), 42);
    }

    #[test]
    fn init_data_overwrites_existing() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_u8(5, 0xFF).unwrap();
        mem.init_data(5, &[0xABu8]).unwrap();
        assert_eq!(mem.load_u8(5).unwrap(), 0xAB);
    }

    // --- fill ---

    #[test]
    fn fill_writes_byte_pattern() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.fill(100, 0xAB, 5).unwrap();
        for i in 0..5usize {
            assert_eq!(mem.load_u8(100 + i).unwrap(), 0xAB);
        }
    }

    #[test]
    fn fill_zero_len_is_noop() {
        let mut mem = Mem::try_new(1).unwrap();
        assert!(mem.fill(0, 0xFF, 0).is_ok());
    }

    #[test]
    fn fill_out_of_bounds() {
        let mut mem = Mem::try_new(1).unwrap();
        assert_eq!(mem.fill(PAGE_SIZE - 3, 0, 10), Err(WasmTrap::OutOfBounds));
    }

    #[test]
    fn fill_at_boundary() {
        let mut mem = Mem::try_new(1).unwrap();
        assert!(mem.fill(PAGE_SIZE - 4, 0x42, 4).is_ok());
        assert_eq!(mem.load_u8(PAGE_SIZE - 1).unwrap(), 0x42);
    }

    // --- init_data_partial ---

    #[test]
    fn init_data_partial_copies_subrange() {
        let mut mem = Mem::try_new(1).unwrap();
        let data = b"Hello, World!";
        // Copy "World" (offset 7, length 5) to the start of memory.
        mem.init_data_partial(0, data, 7, 5).unwrap();
        assert_eq!(mem.load_u8(0).unwrap(), b'W');
        assert_eq!(mem.load_u8(4).unwrap(), b'd');
    }

    #[test]
    fn init_data_partial_zero_len_is_noop() {
        let mut mem = Mem::try_new(1).unwrap();
        assert!(mem.init_data_partial(0, b"Hello", 0, 0).is_ok());
    }

    #[test]
    fn init_data_partial_full_segment() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.init_data_partial(10, b"Hello", 0, 5).unwrap();
        assert_eq!(mem.load_u8(10).unwrap(), b'H');
        assert_eq!(mem.load_u8(14).unwrap(), b'o');
    }

    #[test]
    fn init_data_partial_src_out_of_bounds() {
        let mut mem = Mem::try_new(1).unwrap();
        // Source range 3..8 exceeds the 5-byte segment.
        assert_eq!(
            mem.init_data_partial(0, b"Hello", 3, 5),
            Err(WasmTrap::OutOfBounds)
        );
    }

    #[test]
    fn init_data_partial_dst_out_of_bounds() {
        let mut mem = Mem::try_new(1).unwrap();
        assert_eq!(
            mem.init_data_partial(PAGE_SIZE - 2, b"Hello", 0, 5),
            Err(WasmTrap::OutOfBounds)
        );
    }

    #[test]
    fn init_data_partial_src_offset_overflow() {
        let mut mem = Mem::try_new(1).unwrap();
        assert_eq!(
            mem.init_data_partial(0, b"Hello", usize::MAX, 1),
            Err(WasmTrap::OutOfBounds)
        );
    }

    // --- byte order ---

    #[test]
    fn i32_is_little_endian() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_i32(0, 0x04030201).unwrap();
        // Least-significant byte must land at the lowest address.
        assert_eq!(mem.load_u8(0), Ok(0x01));
        assert_eq!(mem.load_u8(1), Ok(0x02));
        assert_eq!(mem.load_u8(2), Ok(0x03));
        assert_eq!(mem.load_u8(3), Ok(0x04));
    }
}
904
#[cfg(kani)]
mod proofs {
    use super::*;

    // NOTE: these proofs previously called `IsolatedMemory::<N>::new(..)`,
    // but no `new` constructor exists (only `try_new`/`try_init`), so the
    // module failed to compile under `cfg(kani)`. All constructions now use
    // `try_new(..).unwrap()`, which is infallible for the constant,
    // in-range page counts used here.

    /// A successful i32 load implies the 4-byte range was in bounds.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_i32_never_panics() {
        let mem = IsolatedMemory::<4>::try_new(1).unwrap();
        let offset: usize = kani::any();

        let result = mem.load_i32(offset);

        if result.is_ok() {
            kani::assert(
                offset.checked_add(4).is_some(),
                "successful load must not overflow",
            );
            kani::assert(
                offset + 4 <= mem.active_size(),
                "successful load must be within active region",
            );
        }
    }

    /// i64 loads never panic for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_i64_never_panics() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_i64(offset);
    }

    /// u8 loads never panic for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_u8_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_u8(offset);
    }

    /// u16 loads never panic for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_u16_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_u16(offset);
    }

    /// f32 loads never panic for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_f32_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_f32(offset);
    }

    /// f64 loads never panic for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_f64_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_f64(offset);
    }

    /// i32 stores never panic for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_i32_never_panics() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i32 = kani::any();
        let _ = mem.store_i32(offset, value);
    }

    /// i64 stores never panic for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_i64_never_panics() {
        let mut mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let offset: usize = kani::any();
        let value: i64 = kani::any();
        let _ = mem.store_i64(offset, value);
    }

    /// u8 stores never panic for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_u8_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u8 = kani::any();
        let _ = mem.store_u8(offset, value);
    }

    /// u16 stores never panic for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_u16_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u16 = kani::any();
        let _ = mem.store_u16(offset, value);
    }

    /// f32 stores never panic for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_f32_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: f32 = kani::any();
        let _ = mem.store_f32(offset, value);
    }

    /// f64 stores never panic for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_f64_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: f64 = kani::any();
        let _ = mem.store_f64(offset, value);
    }

    /// `grow` never exceeds MAX_PAGES; on success it returns the old page
    /// count and updates the count; on failure it leaves state untouched.
    #[kani::proof]
    #[kani::unwind(5)]
    fn grow_respects_max_pages() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        let delta: u32 = kani::any();

        let old_pages = mem.page_count();
        let result = mem.grow(delta);

        kani::assert(
            mem.page_count() <= 4,
            "active_pages must not exceed MAX_PAGES",
        );

        if result >= 0 {
            kani::assert(result == old_pages as i32, "grow returns old page count");
            // Widen to u64 so the expected value cannot overflow.
            let new_expected = old_pages as u64 + delta as u64;
            if new_expected <= 4 {
                kani::assert(
                    mem.page_count() == new_expected as usize,
                    "grow updates active_pages correctly",
                );
            }
        } else {
            kani::assert(
                mem.page_count() == old_pages,
                "failed grow leaves active_pages unchanged",
            );
        }
    }

    /// Growing 2 pages by 3 with a 4-page maximum must fail cleanly.
    #[kani::proof]
    #[kani::unwind(4)]
    fn grow_fails_beyond_max() {
        let mut mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let result = mem.grow(3);
        kani::assert(result == -1, "grow beyond MAX_PAGES returns -1");
        kani::assert(mem.page_count() == 2, "failed grow leaves pages unchanged");
    }

    /// A successful i32 store is followed by a load of the same value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_i32() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i32 = kani::any();

        if mem.store_i32(offset, value).is_ok() {
            let loaded = mem.load_i32(offset);
            kani::assert(loaded.is_ok(), "load succeeds after successful store");
            kani::assert(loaded.unwrap() == value, "load returns the stored value");
        }
    }

    /// i64 store/load roundtrip preserves the value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_i64() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i64 = kani::any();

        if mem.store_i64(offset, value).is_ok() {
            kani::assert(
                mem.load_i64(offset) == Ok(value),
                "i64 roundtrip preserves value",
            );
        }
    }

    /// u8 store/load roundtrip preserves the value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_u8() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u8 = kani::any();

        if mem.store_u8(offset, value).is_ok() {
            kani::assert(
                mem.load_u8(offset) == Ok(value),
                "u8 roundtrip preserves value",
            );
        }
    }

    /// u16 store/load roundtrip preserves the value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_u16() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u16 = kani::any();

        if mem.store_u16(offset, value).is_ok() {
            kani::assert(
                mem.load_u16(offset) == Ok(value),
                "u16 roundtrip preserves value",
            );
        }
    }

    /// Pages activated by `grow` read back as zero.
    #[kani::proof]
    #[kani::unwind(2)]
    fn grow_zeroes_new_pages() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();

        let result = mem.grow(1);

        if result >= 0 {
            let value = mem.load_i32(PAGE_SIZE);
            if value.is_ok() {
                kani::assert(value.unwrap() == 0, "newly grown page is zero-initialized");
            }
        }
    }

    /// `offset + 4` overflowing usize traps rather than wrapping.
    #[kani::proof]
    #[kani::unwind(1)]
    fn offset_overflow_handled() {
        let mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let result = mem.load_i32(usize::MAX);
        kani::assert(
            result == Err(WasmTrap::OutOfBounds),
            "overflow offset returns OutOfBounds",
        );
    }

    /// Reserve (inactive) capacity is never addressable.
    #[kani::proof]
    #[kani::unwind(1)]
    fn access_beyond_active_pages_rejected() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();

        let result1 = mem.load_i32(0);
        kani::assert(result1.is_ok(), "access within active pages succeeds");

        let result2 = mem.load_i32(PAGE_SIZE);
        kani::assert(
            result2 == Err(WasmTrap::OutOfBounds),
            "access beyond active_pages is rejected",
        );
    }

    /// `active_size` is always `page_count * PAGE_SIZE`.
    #[kani::proof]
    #[kani::unwind(1)]
    fn active_size_invariant() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        kani::assert(
            mem.active_size() == mem.page_count() * PAGE_SIZE,
            "active_size = active_pages * PAGE_SIZE",
        );
    }

    /// `size()` mirrors `page_count()`.
    #[kani::proof]
    #[kani::unwind(1)]
    fn size_returns_page_count() {
        let mem = IsolatedMemory::<4>::try_new(3).unwrap();
        kani::assert(
            mem.size() == mem.page_count() as i32,
            "size() returns active_pages",
        );
    }

    /// A successful load proves the whole 4-byte range was valid.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_success_implies_valid_range() {
        let mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();

        let result = mem.load_i32(offset);

        if result.is_ok() {
            let end = offset.checked_add(4);
            kani::assert(end.is_some(), "successful load offset does not overflow");
            kani::assert(
                end.unwrap() <= mem.active_size(),
                "successful load is within bounds",
            );
        }
    }

    /// A successful store proves the whole 8-byte range was valid.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_success_implies_valid_range() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i64 = kani::any();

        let result = mem.store_i64(offset, value);

        if result.is_ok() {
            let end = offset.checked_add(8);
            kani::assert(end.is_some(), "successful store offset does not overflow");
            kani::assert(
                end.unwrap() <= mem.active_size(),
                "successful store is within bounds",
            );
        }
    }

    /// `as_slice` exposes exactly the active region.
    #[kani::proof]
    #[kani::unwind(1)]
    fn as_slice_length_correct() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let slice = mem.as_slice();
        kani::assert(
            slice.len() == mem.active_size(),
            "as_slice length equals active_size",
        );
    }

    /// `as_mut_slice` exposes exactly the active region.
    #[kani::proof]
    #[kani::unwind(1)]
    fn as_mut_slice_length_correct() {
        let mut mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let slice = mem.as_mut_slice();
        kani::assert(
            slice.len() == mem.active_size(),
            "as_mut_slice length equals active_size",
        );
    }
}