1use crate::{WasmResult, WasmTrap, PAGE_SIZE};
16
/// Fixed-capacity Wasm linear memory backed entirely by inline storage.
///
/// The backing store is a statically sized 2-D array of `MAX_PAGES` pages of
/// `PAGE_SIZE` bytes each — no heap allocation. Only the first `active_pages`
/// pages are addressable; accesses past that trap (see the accessor methods).
pub struct IsolatedMemory<const MAX_PAGES: usize> {
    /// Backing storage for all pages, including not-yet-grown ones.
    pages: [[u8; PAGE_SIZE]; MAX_PAGES],
    /// Number of currently active pages; invariant: `active_pages <= MAX_PAGES`.
    active_pages: usize,
}
30
31impl<const MAX_PAGES: usize> IsolatedMemory<MAX_PAGES> {
32 #[inline(never)]
37 pub fn try_new(initial_pages: usize) -> Result<Self, crate::ConstructionError> {
38 if initial_pages > MAX_PAGES {
39 return Err(crate::ConstructionError::MemoryInitialPagesExceedsMax {
40 initial: initial_pages,
41 max: MAX_PAGES,
42 });
43 }
44 Ok(Self {
45 pages: [[0u8; PAGE_SIZE]; MAX_PAGES],
46 active_pages: initial_pages,
47 })
48 }
49
50 #[inline(never)]
59 pub fn try_init(
60 slot: &mut core::mem::MaybeUninit<Self>,
61 initial_pages: usize,
62 ) -> Result<(), crate::ConstructionError> {
63 if initial_pages > MAX_PAGES {
64 return Err(crate::ConstructionError::MemoryInitialPagesExceedsMax {
65 initial: initial_pages,
66 max: MAX_PAGES,
67 });
68 }
69 let ptr = slot.as_mut_ptr();
70 unsafe {
74 core::ptr::addr_of_mut!((*ptr).pages).write_bytes(0, 1);
77 core::ptr::addr_of_mut!((*ptr).active_pages).write(initial_pages);
78 }
79 Ok(())
80 }
81
82 #[inline(always)]
84 pub fn page_count(&self) -> usize {
85 self.active_pages
86 }
87
88 #[inline(always)]
90 pub fn active_size(&self) -> usize {
91 self.active_pages * PAGE_SIZE
92 }
93
94 pub fn grow(&mut self, delta: u32) -> i32 {
97 let old = self.active_pages;
98 let new = old.wrapping_add(delta as usize);
99 if new > MAX_PAGES {
100 return -1;
101 }
102 for page in &mut self.pages[old..new] {
104 page.fill(0);
105 }
106 self.active_pages = new;
107 old as i32
108 }
109
110 #[inline(always)]
112 pub fn size(&self) -> i32 {
113 self.active_pages as i32
114 }
115
116 #[inline(always)]
118 fn flat(&self) -> &[u8] {
119 self.pages.as_flattened()
120 }
121
122 #[inline(always)]
124 fn flat_mut(&mut self) -> &mut [u8] {
125 self.pages.as_flattened_mut()
126 }
127
128 pub fn memory_copy(&mut self, dst: u32, src: u32, len: u32) -> WasmResult<()> {
136 let active = self.active_size();
137 let dst = dst as usize;
138 let src = src as usize;
139 let len = len as usize;
140 if src.checked_add(len).is_none_or(|end| end > active)
141 || dst.checked_add(len).is_none_or(|end| end > active)
142 {
143 return Err(WasmTrap::OutOfBounds);
144 }
145 self.flat_mut().copy_within(src..src + len, dst);
146 Ok(())
147 }
148
149 #[inline(always)]
153 pub fn load_i32(&self, offset: usize) -> WasmResult<i32> {
154 load_i32_inner(self.flat(), self.active_size(), offset)
155 }
156
157 #[inline(always)]
159 pub fn load_i64(&self, offset: usize) -> WasmResult<i64> {
160 load_i64_inner(self.flat(), self.active_size(), offset)
161 }
162
163 #[inline(always)]
165 pub fn load_u8(&self, offset: usize) -> WasmResult<u8> {
166 load_u8_inner(self.flat(), self.active_size(), offset)
167 }
168
169 #[inline(always)]
171 pub fn load_u16(&self, offset: usize) -> WasmResult<u16> {
172 load_u16_inner(self.flat(), self.active_size(), offset)
173 }
174
175 #[inline(always)]
177 pub fn load_f32(&self, offset: usize) -> WasmResult<f32> {
178 load_f32_inner(self.flat(), self.active_size(), offset)
179 }
180
181 #[inline(always)]
183 pub fn load_f64(&self, offset: usize) -> WasmResult<f64> {
184 load_f64_inner(self.flat(), self.active_size(), offset)
185 }
186
187 #[inline(always)]
189 pub fn store_i32(&mut self, offset: usize, value: i32) -> WasmResult<()> {
190 let active = self.active_size();
191 store_i32_inner(self.flat_mut(), active, offset, value)
192 }
193
194 #[inline(always)]
196 pub fn store_i64(&mut self, offset: usize, value: i64) -> WasmResult<()> {
197 let active = self.active_size();
198 store_i64_inner(self.flat_mut(), active, offset, value)
199 }
200
201 #[inline(always)]
203 pub fn store_u8(&mut self, offset: usize, value: u8) -> WasmResult<()> {
204 let active = self.active_size();
205 store_u8_inner(self.flat_mut(), active, offset, value)
206 }
207
208 #[inline(always)]
210 pub fn store_u16(&mut self, offset: usize, value: u16) -> WasmResult<()> {
211 let active = self.active_size();
212 store_u16_inner(self.flat_mut(), active, offset, value)
213 }
214
215 #[inline(always)]
217 pub fn store_f32(&mut self, offset: usize, value: f32) -> WasmResult<()> {
218 let active = self.active_size();
219 store_f32_inner(self.flat_mut(), active, offset, value)
220 }
221
222 #[inline(always)]
224 pub fn store_f64(&mut self, offset: usize, value: f64) -> WasmResult<()> {
225 let active = self.active_size();
226 store_f64_inner(self.flat_mut(), active, offset, value)
227 }
228
229 #[inline(always)]
239 pub fn init_data(&mut self, offset: usize, data: &[u8]) -> WasmResult<()> {
240 let active = self.active_size();
241 init_data_inner(self.flat_mut(), active, offset, data)
242 }
243
244 #[inline(always)]
254 pub unsafe fn load_i32_unchecked(&self, offset: usize) -> i32 {
255 load_i32_unchecked_inner(self.flat(), offset)
256 }
257
258 #[inline(always)]
263 pub unsafe fn load_i64_unchecked(&self, offset: usize) -> i64 {
264 load_i64_unchecked_inner(self.flat(), offset)
265 }
266
267 #[inline(always)]
272 pub unsafe fn store_i32_unchecked(&mut self, offset: usize, value: i32) {
273 store_i32_unchecked_inner(self.flat_mut(), offset, value)
274 }
275
276 #[inline(always)]
281 pub unsafe fn store_i64_unchecked(&mut self, offset: usize, value: i64) {
282 store_i64_unchecked_inner(self.flat_mut(), offset, value)
283 }
284
285 #[inline(always)]
287 pub fn as_slice(&self) -> &[u8] {
288 &self.flat()[..self.active_size()]
289 }
290
291 #[inline(always)]
293 pub fn as_mut_slice(&mut self) -> &mut [u8] {
294 let size = self.active_size();
295 &mut self.flat_mut()[..size]
296 }
297}
298
299#[inline(always)]
304fn checked_slice(
305 memory: &[u8],
306 active_bytes: usize,
307 offset: usize,
308 len: usize,
309) -> WasmResult<&[u8]> {
310 let end = offset.checked_add(len).ok_or(WasmTrap::OutOfBounds)?;
311 if end > active_bytes {
312 return Err(WasmTrap::OutOfBounds);
313 }
314 memory.get(offset..end).ok_or(WasmTrap::OutOfBounds)
318}
319
320#[inline(always)]
322fn checked_slice_mut(
323 memory: &mut [u8],
324 active_bytes: usize,
325 offset: usize,
326 len: usize,
327) -> WasmResult<&mut [u8]> {
328 let end = offset.checked_add(len).ok_or(WasmTrap::OutOfBounds)?;
329 if end > active_bytes {
330 return Err(WasmTrap::OutOfBounds);
331 }
332 memory.get_mut(offset..end).ok_or(WasmTrap::OutOfBounds)
333}
334
335#[inline(always)]
338fn to_array<const N: usize>(slice: &[u8]) -> WasmResult<[u8; N]> {
339 slice.try_into().map_err(|_| WasmTrap::OutOfBounds)
340}
341
342#[inline(never)]
351fn load_i32_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<i32> {
352 let s = checked_slice(memory, active_bytes, offset, 4)?;
353 Ok(i32::from_le_bytes(to_array(s)?))
354}
355
356#[inline(never)]
357fn load_i64_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<i64> {
358 let s = checked_slice(memory, active_bytes, offset, 8)?;
359 Ok(i64::from_le_bytes(to_array(s)?))
360}
361
362#[inline(never)]
363fn load_u8_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<u8> {
364 let s = checked_slice(memory, active_bytes, offset, 1)?;
365 Ok(s[0])
366}
367
368#[inline(never)]
369fn load_u16_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<u16> {
370 let s = checked_slice(memory, active_bytes, offset, 2)?;
371 Ok(u16::from_le_bytes(to_array(s)?))
372}
373
374#[inline(never)]
375fn load_f32_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<f32> {
376 let s = checked_slice(memory, active_bytes, offset, 4)?;
377 Ok(f32::from_le_bytes(to_array(s)?))
378}
379
380#[inline(never)]
381fn load_f64_inner(memory: &[u8], active_bytes: usize, offset: usize) -> WasmResult<f64> {
382 let s = checked_slice(memory, active_bytes, offset, 8)?;
383 Ok(f64::from_le_bytes(to_array(s)?))
384}
385
386#[inline(never)]
387fn store_i32_inner(
388 memory: &mut [u8],
389 active_bytes: usize,
390 offset: usize,
391 value: i32,
392) -> WasmResult<()> {
393 let s = checked_slice_mut(memory, active_bytes, offset, 4)?;
394 s.copy_from_slice(&value.to_le_bytes());
395 Ok(())
396}
397
398#[inline(never)]
399fn store_i64_inner(
400 memory: &mut [u8],
401 active_bytes: usize,
402 offset: usize,
403 value: i64,
404) -> WasmResult<()> {
405 let s = checked_slice_mut(memory, active_bytes, offset, 8)?;
406 s.copy_from_slice(&value.to_le_bytes());
407 Ok(())
408}
409
410#[inline(never)]
411fn store_u8_inner(
412 memory: &mut [u8],
413 active_bytes: usize,
414 offset: usize,
415 value: u8,
416) -> WasmResult<()> {
417 let s = checked_slice_mut(memory, active_bytes, offset, 1)?;
418 s[0] = value;
419 Ok(())
420}
421
422#[inline(never)]
423fn store_u16_inner(
424 memory: &mut [u8],
425 active_bytes: usize,
426 offset: usize,
427 value: u16,
428) -> WasmResult<()> {
429 let s = checked_slice_mut(memory, active_bytes, offset, 2)?;
430 s.copy_from_slice(&value.to_le_bytes());
431 Ok(())
432}
433
434#[inline(never)]
435fn store_f32_inner(
436 memory: &mut [u8],
437 active_bytes: usize,
438 offset: usize,
439 value: f32,
440) -> WasmResult<()> {
441 let s = checked_slice_mut(memory, active_bytes, offset, 4)?;
442 s.copy_from_slice(&value.to_le_bytes());
443 Ok(())
444}
445
446#[inline(never)]
447fn store_f64_inner(
448 memory: &mut [u8],
449 active_bytes: usize,
450 offset: usize,
451 value: f64,
452) -> WasmResult<()> {
453 let s = checked_slice_mut(memory, active_bytes, offset, 8)?;
454 s.copy_from_slice(&value.to_le_bytes());
455 Ok(())
456}
457
458#[inline(never)]
459fn init_data_inner(
460 memory: &mut [u8],
461 active_bytes: usize,
462 offset: usize,
463 data: &[u8],
464) -> WasmResult<()> {
465 let dst = checked_slice_mut(memory, active_bytes, offset, data.len())?;
466 dst.copy_from_slice(data);
467 Ok(())
468}
469
/// Unchecked little-endian `i32` load via a 4-byte copy (equivalent to an
/// unaligned read followed by `from_le` on every endianness).
///
/// # Safety
/// `offset + 4` must not exceed `memory.len()`.
#[inline(never)]
unsafe fn load_i32_unchecked_inner(memory: &[u8], offset: usize) -> i32 {
    let mut raw = [0u8; 4];
    core::ptr::copy_nonoverlapping(memory.as_ptr().add(offset), raw.as_mut_ptr(), 4);
    i32::from_le_bytes(raw)
}
482
/// Unchecked little-endian `i64` load via an 8-byte copy.
///
/// # Safety
/// `offset + 8` must not exceed `memory.len()`.
#[inline(never)]
unsafe fn load_i64_unchecked_inner(memory: &[u8], offset: usize) -> i64 {
    let mut raw = [0u8; 8];
    core::ptr::copy_nonoverlapping(memory.as_ptr().add(offset), raw.as_mut_ptr(), 8);
    i64::from_le_bytes(raw)
}
488
/// Unchecked little-endian `i32` store via a 4-byte copy (equivalent to
/// `write_unaligned(value.to_le())` on every endianness).
///
/// # Safety
/// `offset + 4` must not exceed `memory.len()`.
#[inline(never)]
unsafe fn store_i32_unchecked_inner(memory: &mut [u8], offset: usize, value: i32) {
    let raw = value.to_le_bytes();
    core::ptr::copy_nonoverlapping(raw.as_ptr(), memory.as_mut_ptr().add(offset), 4);
}
494
/// Unchecked little-endian `i64` store via an 8-byte copy.
///
/// # Safety
/// `offset + 8` must not exceed `memory.len()`.
#[inline(never)]
unsafe fn store_i64_unchecked_inner(memory: &mut [u8], offset: usize, value: i64) {
    let raw = value.to_le_bytes();
    core::ptr::copy_nonoverlapping(raw.as_ptr(), memory.as_mut_ptr().add(offset), 8);
}
500
#[cfg(test)]
mod tests {
    use super::*;

    // Single-page memory used by most tests.
    type Mem = IsolatedMemory<1>;

    // ---- construction ----

    #[test]
    fn new_initializes_to_zero() {
        let mem = Mem::try_new(1).unwrap();
        assert_eq!(mem.page_count(), 1);
        assert_eq!(mem.active_size(), PAGE_SIZE);
        assert!(mem.as_slice().iter().all(|&b| b == 0));
    }

    #[test]
    fn try_new_fails_if_initial_exceeds_max() {
        let result = Mem::try_new(2);
        assert!(result.is_err());
        assert!(matches!(
            result,
            Err(crate::ConstructionError::MemoryInitialPagesExceedsMax { initial: 2, max: 1 })
        ));
    }

    // ---- grow ----

    #[test]
    fn grow_success() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        // grow returns the OLD page count on success.
        assert_eq!(mem.grow(2), 1);
        assert_eq!(mem.page_count(), 3);
    }

    #[test]
    fn grow_to_max() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        assert_eq!(mem.grow(3), 1);
        assert_eq!(mem.page_count(), 4);
    }

    #[test]
    fn grow_beyond_max_fails() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        // 1 + 4 > 4 pages: must fail and leave the page count untouched.
        assert_eq!(mem.grow(4), -1);
        assert_eq!(mem.page_count(), 1);
    }

    #[test]
    fn grow_zero_is_noop() {
        let mut mem = Mem::try_new(1).unwrap();
        assert_eq!(mem.grow(0), 1);
        assert_eq!(mem.page_count(), 1);
    }

    #[test]
    fn grow_zeroes_new_pages() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        assert_eq!(mem.grow(1), 1);
        // Inspect the raw backing store for the newly activated page.
        let flat = mem.flat();
        let new_start = PAGE_SIZE;
        let new_end = 2 * PAGE_SIZE;
        assert!(flat[new_start..new_end].iter().all(|&b| b == 0));
    }

    #[test]
    fn size_returns_page_count() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        assert_eq!(mem.size(), 2);
    }

    // ---- i32 load/store ----

    #[test]
    fn store_load_i32_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_i32(100, 0x12345678).unwrap();
        assert_eq!(mem.load_i32(100), Ok(0x12345678));
    }

    #[test]
    fn load_i32_out_of_bounds() {
        let mem = Mem::try_new(1).unwrap();
        // The last valid 4-byte window ends exactly at PAGE_SIZE.
        assert!(mem.load_i32(PAGE_SIZE - 4).is_ok());
        assert_eq!(mem.load_i32(PAGE_SIZE - 3), Err(WasmTrap::OutOfBounds));
        assert_eq!(mem.load_i32(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
    }

    #[test]
    fn store_i32_out_of_bounds() {
        let mut mem = Mem::try_new(1).unwrap();
        assert!(mem.store_i32(PAGE_SIZE - 4, 42).is_ok());
        assert_eq!(mem.store_i32(PAGE_SIZE - 3, 42), Err(WasmTrap::OutOfBounds));
    }

    #[test]
    fn load_i32_offset_overflow() {
        // offset + 4 overflows usize: must trap, not panic.
        let mem = Mem::try_new(1).unwrap();
        assert_eq!(mem.load_i32(usize::MAX), Err(WasmTrap::OutOfBounds));
    }

    // ---- i64 load/store ----

    #[test]
    fn store_load_i64_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_i64(200, 0x0102030405060708i64).unwrap();
        assert_eq!(mem.load_i64(200), Ok(0x0102030405060708i64));
    }

    #[test]
    fn load_i64_out_of_bounds() {
        let mem = Mem::try_new(1).unwrap();
        assert!(mem.load_i64(PAGE_SIZE - 8).is_ok());
        assert_eq!(mem.load_i64(PAGE_SIZE - 7), Err(WasmTrap::OutOfBounds));
    }

    // ---- u8 load/store ----

    #[test]
    fn store_load_u8_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_u8(0, 0xFF).unwrap();
        assert_eq!(mem.load_u8(0), Ok(0xFF));
    }

    #[test]
    fn load_u8_out_of_bounds() {
        let mem = Mem::try_new(1).unwrap();
        assert!(mem.load_u8(PAGE_SIZE - 1).is_ok());
        assert_eq!(mem.load_u8(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
    }

    // ---- u16 / f32 / f64 roundtrips ----

    #[test]
    fn store_load_u16_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_u16(50, 0xBEEF).unwrap();
        assert_eq!(mem.load_u16(50), Ok(0xBEEF));
    }

    #[test]
    fn store_load_f32_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_f32(300, core::f32::consts::PI).unwrap();
        // Bit-exact roundtrip, so float equality is safe here.
        assert_eq!(mem.load_f32(300), Ok(core::f32::consts::PI));
    }

    #[test]
    fn store_load_f64_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_f64(400, core::f64::consts::E).unwrap();
        assert_eq!(mem.load_f64(400), Ok(core::f64::consts::E));
    }

    // ---- unchecked accessors (offsets kept in bounds by the test) ----

    #[test]
    fn unchecked_i32_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        unsafe {
            mem.store_i32_unchecked(100, 42);
            assert_eq!(mem.load_i32_unchecked(100), 42);
        }
    }

    #[test]
    fn unchecked_i64_roundtrip() {
        let mut mem = Mem::try_new(1).unwrap();
        unsafe {
            mem.store_i64_unchecked(200, -1i64);
            assert_eq!(mem.load_i64_unchecked(200), -1i64);
        }
    }

    // ---- active-region enforcement ----

    #[test]
    fn access_beyond_active_pages_traps() {
        // Backing store has 2 pages, but only 1 is active.
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        assert!(mem.load_i32(0).is_ok());
        assert_eq!(mem.load_i32(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
    }

    #[test]
    fn grow_then_access_new_region() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        assert_eq!(mem.load_i32(PAGE_SIZE), Err(WasmTrap::OutOfBounds));
        mem.grow(1);
        assert!(mem.load_i32(PAGE_SIZE).is_ok());
        mem.store_i32(PAGE_SIZE, 99).unwrap();
        assert_eq!(mem.load_i32(PAGE_SIZE), Ok(99));
    }

    // ---- init_data (data segment initialization) ----

    #[test]
    fn init_data_writes_bytes() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.init_data(10, &[1u8, 2, 3, 4]).unwrap();
        assert_eq!(mem.load_u8(10).unwrap(), 1);
        assert_eq!(mem.load_u8(11).unwrap(), 2);
        assert_eq!(mem.load_u8(12).unwrap(), 3);
        assert_eq!(mem.load_u8(13).unwrap(), 4);
    }

    #[test]
    fn init_data_empty_slice_is_noop() {
        let mut mem = Mem::try_new(1).unwrap();
        assert!(mem.init_data(0, &[]).is_ok());
    }

    #[test]
    fn init_data_out_of_bounds() {
        let mut mem = Mem::try_new(1).unwrap();
        let data = [0u8; 10];
        assert_eq!(
            mem.init_data(PAGE_SIZE - 5, &data),
            Err(WasmTrap::OutOfBounds)
        );
    }

    #[test]
    fn init_data_at_boundary() {
        let mut mem = Mem::try_new(1).unwrap();
        let data = [42u8; 4];
        assert!(mem.init_data(PAGE_SIZE - 4, &data).is_ok());
        assert_eq!(mem.load_u8(PAGE_SIZE - 1).unwrap(), 42);
    }

    #[test]
    fn init_data_overwrites_existing() {
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_u8(5, 0xFF).unwrap();
        mem.init_data(5, &[0xABu8]).unwrap();
        assert_eq!(mem.load_u8(5).unwrap(), 0xAB);
    }

    // ---- endianness ----

    #[test]
    fn i32_is_little_endian() {
        // Wasm memory is little-endian: least significant byte first.
        let mut mem = Mem::try_new(1).unwrap();
        mem.store_i32(0, 0x04030201).unwrap();
        assert_eq!(mem.load_u8(0), Ok(0x01));
        assert_eq!(mem.load_u8(1), Ok(0x02));
        assert_eq!(mem.load_u8(2), Ok(0x03));
        assert_eq!(mem.load_u8(3), Ok(0x04));
    }
}
762
#[cfg(kani)]
mod proofs {
    // NOTE(review): these harnesses previously called
    // `IsolatedMemory::<N>::new(k)`, but the type exposes only the fallible
    // `try_new` constructor — the proofs could not compile under `cfg(kani)`.
    // All construction sites now use `try_new(k).unwrap()`.
    use super::*;

    /// A successful `load_i32` implies the access was within the active region.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_i32_never_panics() {
        let mem = IsolatedMemory::<4>::try_new(1).unwrap();
        let offset: usize = kani::any();

        let result = mem.load_i32(offset);

        if result.is_ok() {
            kani::assert(
                offset.checked_add(4).is_some(),
                "successful load must not overflow",
            );
            kani::assert(
                offset + 4 <= mem.active_size(),
                "successful load must be within active region",
            );
        }
    }

    /// `load_i64` never panics for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_i64_never_panics() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_i64(offset);
    }

    /// `load_u8` never panics for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_u8_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_u8(offset);
    }

    /// `load_u16` never panics for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_u16_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_u16(offset);
    }

    /// `load_f32` never panics for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_f32_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_f32(offset);
    }

    /// `load_f64` never panics for any offset.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_f64_never_panics() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let _ = mem.load_f64(offset);
    }

    /// `store_i32` never panics for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_i32_never_panics() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i32 = kani::any();
        let _ = mem.store_i32(offset, value);
    }

    /// `store_i64` never panics for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_i64_never_panics() {
        let mut mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let offset: usize = kani::any();
        let value: i64 = kani::any();
        let _ = mem.store_i64(offset, value);
    }

    /// `store_u8` never panics for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_u8_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u8 = kani::any();
        let _ = mem.store_u8(offset, value);
    }

    /// `store_u16` never panics for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_u16_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u16 = kani::any();
        let _ = mem.store_u16(offset, value);
    }

    /// `store_f32` never panics for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_f32_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: f32 = kani::any();
        let _ = mem.store_f32(offset, value);
    }

    /// `store_f64` never panics for any offset/value.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_f64_never_panics() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: f64 = kani::any();
        let _ = mem.store_f64(offset, value);
    }

    /// `grow` never exceeds MAX_PAGES, returns the old count on success, and
    /// leaves the page count untouched on failure.
    #[kani::proof]
    #[kani::unwind(5)]
    fn grow_respects_max_pages() {
        let mut mem = IsolatedMemory::<4>::try_new(1).unwrap();
        let delta: u32 = kani::any();

        let old_pages = mem.page_count();
        let result = mem.grow(delta);

        kani::assert(
            mem.page_count() <= 4,
            "active_pages must not exceed MAX_PAGES",
        );

        if result >= 0 {
            kani::assert(result == old_pages as i32, "grow returns old page count");
            // Widen to u64 so the expected sum cannot overflow.
            let new_expected = old_pages as u64 + delta as u64;
            if new_expected <= 4 {
                kani::assert(
                    mem.page_count() == new_expected as usize,
                    "grow updates active_pages correctly",
                );
            }
        } else {
            kani::assert(
                mem.page_count() == old_pages,
                "failed grow leaves active_pages unchanged",
            );
        }
    }

    /// Concrete over-grow case: 2 + 3 > 4 pages must be rejected.
    #[kani::proof]
    #[kani::unwind(4)]
    fn grow_fails_beyond_max() {
        let mut mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let result = mem.grow(3);
        kani::assert(result == -1, "grow beyond MAX_PAGES returns -1");
        kani::assert(mem.page_count() == 2, "failed grow leaves pages unchanged");
    }

    /// Every successful i32 store can be read back exactly.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_i32() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i32 = kani::any();

        if mem.store_i32(offset, value).is_ok() {
            let loaded = mem.load_i32(offset);
            kani::assert(loaded.is_ok(), "load succeeds after successful store");
            kani::assert(loaded.unwrap() == value, "load returns the stored value");
        }
    }

    /// Every successful i64 store can be read back exactly.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_i64() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i64 = kani::any();

        if mem.store_i64(offset, value).is_ok() {
            kani::assert(
                mem.load_i64(offset) == Ok(value),
                "i64 roundtrip preserves value",
            );
        }
    }

    /// Every successful u8 store can be read back exactly.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_u8() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u8 = kani::any();

        if mem.store_u8(offset, value).is_ok() {
            kani::assert(
                mem.load_u8(offset) == Ok(value),
                "u8 roundtrip preserves value",
            );
        }
    }

    /// Every successful u16 store can be read back exactly.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_load_roundtrip_u16() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: u16 = kani::any();

        if mem.store_u16(offset, value).is_ok() {
            kani::assert(
                mem.load_u16(offset) == Ok(value),
                "u16 roundtrip preserves value",
            );
        }
    }

    /// Pages exposed by `grow` read back as zero.
    #[kani::proof]
    #[kani::unwind(2)]
    fn grow_zeroes_new_pages() {
        let mut mem = IsolatedMemory::<2>::try_new(1).unwrap();

        let result = mem.grow(1);

        if result >= 0 {
            let value = mem.load_i32(PAGE_SIZE);
            if value.is_ok() {
                kani::assert(value.unwrap() == 0, "newly grown page is zero-initialized");
            }
        }
    }

    /// `offset + len` overflow is reported as a trap, never a panic.
    #[kani::proof]
    #[kani::unwind(1)]
    fn offset_overflow_handled() {
        let mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let result = mem.load_i32(usize::MAX);
        kani::assert(
            result == Err(WasmTrap::OutOfBounds),
            "overflow offset returns OutOfBounds",
        );
    }

    /// Backing pages beyond `active_pages` are not addressable.
    #[kani::proof]
    #[kani::unwind(1)]
    fn access_beyond_active_pages_rejected() {
        let mem = IsolatedMemory::<2>::try_new(1).unwrap();

        let result1 = mem.load_i32(0);
        kani::assert(result1.is_ok(), "access within active pages succeeds");

        let result2 = mem.load_i32(PAGE_SIZE);
        kani::assert(
            result2 == Err(WasmTrap::OutOfBounds),
            "access beyond active_pages is rejected",
        );
    }

    /// `active_size` is always `page_count * PAGE_SIZE`.
    #[kani::proof]
    #[kani::unwind(1)]
    fn active_size_invariant() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        kani::assert(
            mem.active_size() == mem.page_count() * PAGE_SIZE,
            "active_size = active_pages * PAGE_SIZE",
        );
    }

    /// `size()` mirrors `page_count()` as an i32.
    #[kani::proof]
    #[kani::unwind(1)]
    fn size_returns_page_count() {
        let mem = IsolatedMemory::<4>::try_new(3).unwrap();
        kani::assert(
            mem.size() == mem.page_count() as i32,
            "size() returns active_pages",
        );
    }

    /// Successful loads never read past the active region.
    #[kani::proof]
    #[kani::unwind(1)]
    fn load_success_implies_valid_range() {
        let mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();

        let result = mem.load_i32(offset);

        if result.is_ok() {
            let end = offset.checked_add(4);
            kani::assert(end.is_some(), "successful load offset does not overflow");
            kani::assert(
                end.unwrap() <= mem.active_size(),
                "successful load is within bounds",
            );
        }
    }

    /// Successful stores never write past the active region.
    #[kani::proof]
    #[kani::unwind(1)]
    fn store_success_implies_valid_range() {
        let mut mem = IsolatedMemory::<1>::try_new(1).unwrap();
        let offset: usize = kani::any();
        let value: i64 = kani::any();

        let result = mem.store_i64(offset, value);

        if result.is_ok() {
            let end = offset.checked_add(8);
            kani::assert(end.is_some(), "successful store offset does not overflow");
            kani::assert(
                end.unwrap() <= mem.active_size(),
                "successful store is within bounds",
            );
        }
    }

    /// `as_slice` exposes exactly the active region.
    #[kani::proof]
    #[kani::unwind(1)]
    fn as_slice_length_correct() {
        let mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let slice = mem.as_slice();
        kani::assert(
            slice.len() == mem.active_size(),
            "as_slice length equals active_size",
        );
    }

    /// `as_mut_slice` exposes exactly the active region.
    #[kani::proof]
    #[kani::unwind(1)]
    fn as_mut_slice_length_correct() {
        let mut mem = IsolatedMemory::<4>::try_new(2).unwrap();
        let slice = mem.as_mut_slice();
        kani::assert(
            slice.len() == mem.active_size(),
            "as_mut_slice length equals active_size",
        );
    }
}